create turso_parser crate

This commit is contained in:
TcMits
2025-08-06 15:42:51 +07:00
parent 34fdec82b9
commit 436d21c81d
14 changed files with 972 additions and 335 deletions

15
Cargo.lock generated
View File

@@ -3952,6 +3952,21 @@ dependencies = [
"turso_core",
]
[[package]]
name = "turso_parser"
version = "0.1.3"
dependencies = [
"bitflags 2.9.0",
"criterion",
"fallible-iterator",
"miette",
"pprof",
"serde",
"strum",
"strum_macros",
"turso_sqlite3_parser",
]
[[package]]
name = "turso_sqlite3_parser"
version = "0.1.3"

View File

@@ -26,7 +26,7 @@ members = [
"testing/sqlite_test_ext",
"tests",
"vendored/sqlite3-parser/sqlparser_bench",
"packages/turso-sync",
"packages/turso-sync", "parser",
]
exclude = ["perf/latency/limbo"]

View File

@@ -111,7 +111,3 @@ harness = false
[[bench]]
name = "tpc_h_benchmark"
harness = false
[[bench]]
name = "parser_benchmark"
harness = false

View File

@@ -1,134 +1,6 @@
use std::collections::HashMap;
use std::fs::{self, File};
use std::io::{BufWriter, Write};
use std::fs;
use std::path::PathBuf;
/// Generates a trie-like function with nested match expressions for parsing SQL keywords.
/// example: input: [["ABORT", "TK_ABORT"], ["ACTION", "TK_ACTION"], ["ADD", "TK_ADD"],]
/// A
/// ├─ B
/// │ ├─ O
/// │ │ ├─ R
/// │ │ │ ├─ T -> TK_ABORT
/// ├─ C
/// │ ├─ T
/// │ │ ├─ I
/// │ │ │ ├─ O
/// │ │ │ │ ├─ N -> TK_ACTION
/// ├─ D
/// │ ├─ D -> TK_ADD
///
/// Writes to `writer` a `pub fn {func_name}(buf: &[u8]) -> Option<TokenType>`
/// that matches ASCII-case-insensitively, plus `MIN_KEYWORD_LEN` /
/// `MAX_KEYWORD_LEN` consts used as a fast length pre-check.
/// `keywords` is a list of `[keyword, TokenType variant name]` pairs and must
/// be non-empty with no duplicate keywords.
fn build_keyword_map(
    writer: &mut impl Write,
    func_name: &str,
    keywords: &[[&'static str; 2]],
) -> Result<(), std::io::Error> {
    assert!(!keywords.is_empty());
    // Length bounds let the generated function reject non-keywords cheaply.
    let min_len = keywords.iter().map(|k| k[0].len()).min().unwrap();
    let max_len = keywords.iter().map(|k| k[0].len()).max().unwrap();

    // One trie node: an optional terminal token plus child edges keyed by the
    // uppercased byte.
    struct PathEntry {
        result: Option<&'static str>,
        sub_entries: HashMap<u8, Box<PathEntry>>,
    }

    // Build the trie from the keyword table.
    let mut paths = Box::new(PathEntry {
        result: None,
        sub_entries: HashMap::new(),
    });
    for keyword in keywords {
        let mut current = &mut paths;
        for &b in keyword[0].as_bytes() {
            // Matching is case-insensitive; normalize every edge to uppercase.
            current = current
                .sub_entries
                .entry(b.to_ascii_uppercase())
                .or_insert_with(|| {
                    Box::new(PathEntry {
                        result: None,
                        sub_entries: HashMap::new(),
                    })
                });
        }
        // A duplicate keyword would silently shadow a previous entry.
        assert!(
            current.result.is_none(),
            "duplicate keyword: {}",
            keyword[0]
        );
        current.result = Some(keyword[1]);
    }

    // Recursively emits the nested `match` for one trie node. `idx` in the
    // generated code is the current position in `buf`.
    fn write_entry(writer: &mut impl Write, entry: &PathEntry) -> Result<(), std::io::Error> {
        if let Some(result) = entry.result {
            // Input exhausted exactly on a terminal node: keyword recognized.
            writeln!(writer, "if idx == buf.len() {{")?;
            writeln!(writer, "return Some(TokenType::{result});")?;
            writeln!(writer, "}}")?;
        }
        if entry.sub_entries.is_empty() {
            writeln!(writer, "None")?;
            return Ok(());
        }
        writeln!(writer, "if idx >= buf.len() {{")?;
        writeln!(writer, "return None;")?;
        writeln!(writer, "}}")?;
        writeln!(writer, "match buf[idx] {{")?;
        for (&b, sub_entry) in &entry.sub_entries {
            // Letters accept both cases; other bytes (e.g. b'_') match exactly.
            if b.is_ascii_alphabetic() {
                writeln!(writer, "{} | {} => {{", b, b.to_ascii_lowercase())?;
            } else {
                writeln!(writer, "{b} => {{")?;
            }
            writeln!(writer, "idx += 1;")?;
            write_entry(writer, sub_entry)?;
            writeln!(writer, "}}")?;
        }
        writeln!(writer, "_ => None")?;
        writeln!(writer, "}}")?;
        Ok(())
    }

    // Emit the length-bound constants. NOTE: this previously emitted the
    // literal `70,772`, which is not valid Rust and ignored the computed
    // `max_len`; emit the real maximum instead.
    writeln!(
        writer,
        "pub(crate) const MAX_KEYWORD_LEN: usize = {max_len};"
    )?;
    writeln!(
        writer,
        "pub(crate) const MIN_KEYWORD_LEN: usize = {min_len};"
    )?;
    writeln!(writer, "/// Check if `word` is a keyword")?;
    writeln!(
        writer,
        "pub fn {func_name}(buf: &[u8]) -> Option<TokenType> {{"
    )?;
    writeln!(
        writer,
        "if buf.len() < MIN_KEYWORD_LEN || buf.len() > MAX_KEYWORD_LEN {{"
    )?;
    writeln!(writer, "return None;")?;
    writeln!(writer, "}}")?;
    writeln!(writer, "let mut idx = 0;")?;
    write_entry(writer, &paths)?;
    writeln!(writer, "}}")?;
    Ok(())
}
fn main() {
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
let built_file = out_dir.join("built.rs");
@@ -146,161 +18,4 @@ fn main() {
),
)
.expect("Failed to append to built file");
let keywords = out_dir.join("keywords.rs");
let mut keywords = BufWriter::new(File::create(keywords).unwrap());
build_keyword_map(
&mut keywords,
"keyword_token",
&[
["ABORT", "TK_ABORT"],
["ACTION", "TK_ACTION"],
["ADD", "TK_ADD"],
["AFTER", "TK_AFTER"],
["ALL", "TK_ALL"],
["ALTER", "TK_ALTER"],
["ALWAYS", "TK_ALWAYS"],
["ANALYZE", "TK_ANALYZE"],
["AND", "TK_AND"],
["AS", "TK_AS"],
["ASC", "TK_ASC"],
["ATTACH", "TK_ATTACH"],
["AUTOINCREMENT", "TK_AUTOINCR"],
["BEFORE", "TK_BEFORE"],
["BEGIN", "TK_BEGIN"],
["BETWEEN", "TK_BETWEEN"],
["BY", "TK_BY"],
["CASCADE", "TK_CASCADE"],
["CASE", "TK_CASE"],
["CAST", "TK_CAST"],
["CHECK", "TK_CHECK"],
["COLLATE", "TK_COLLATE"],
["COLUMN", "TK_COLUMNKW"],
["COMMIT", "TK_COMMIT"],
["CONFLICT", "TK_CONFLICT"],
["CONSTRAINT", "TK_CONSTRAINT"],
["CREATE", "TK_CREATE"],
["CROSS", "TK_JOIN_KW"],
["CURRENT", "TK_CURRENT"],
["CURRENT_DATE", "TK_CTIME_KW"],
["CURRENT_TIME", "TK_CTIME_KW"],
["CURRENT_TIMESTAMP", "TK_CTIME_KW"],
["DATABASE", "TK_DATABASE"],
["DEFAULT", "TK_DEFAULT"],
["DEFERRABLE", "TK_DEFERRABLE"],
["DEFERRED", "TK_DEFERRED"],
["DELETE", "TK_DELETE"],
["DESC", "TK_DESC"],
["DETACH", "TK_DETACH"],
["DISTINCT", "TK_DISTINCT"],
["DO", "TK_DO"],
["DROP", "TK_DROP"],
["EACH", "TK_EACH"],
["ELSE", "TK_ELSE"],
["END", "TK_END"],
["ESCAPE", "TK_ESCAPE"],
["EXCEPT", "TK_EXCEPT"],
["EXCLUDE", "TK_EXCLUDE"],
["EXCLUSIVE", "TK_EXCLUSIVE"],
["EXISTS", "TK_EXISTS"],
["EXPLAIN", "TK_EXPLAIN"],
["FAIL", "TK_FAIL"],
["FILTER", "TK_FILTER"],
["FIRST", "TK_FIRST"],
["FOLLOWING", "TK_FOLLOWING"],
["FOR", "TK_FOR"],
["FOREIGN", "TK_FOREIGN"],
["FROM", "TK_FROM"],
["FULL", "TK_JOIN_KW"],
["GENERATED", "TK_GENERATED"],
["GLOB", "TK_LIKE_KW"],
["GROUP", "TK_GROUP"],
["GROUPS", "TK_GROUPS"],
["HAVING", "TK_HAVING"],
["IF", "TK_IF"],
["IGNORE", "TK_IGNORE"],
["IMMEDIATE", "TK_IMMEDIATE"],
["IN", "TK_IN"],
["INDEX", "TK_INDEX"],
["INDEXED", "TK_INDEXED"],
["INITIALLY", "TK_INITIALLY"],
["INNER", "TK_JOIN_KW"],
["INSERT", "TK_INSERT"],
["INSTEAD", "TK_INSTEAD"],
["INTERSECT", "TK_INTERSECT"],
["INTO", "TK_INTO"],
["IS", "TK_IS"],
["ISNULL", "TK_ISNULL"],
["JOIN", "TK_JOIN"],
["KEY", "TK_KEY"],
["LAST", "TK_LAST"],
["LEFT", "TK_JOIN_KW"],
["LIKE", "TK_LIKE_KW"],
["LIMIT", "TK_LIMIT"],
["MATCH", "TK_MATCH"],
["MATERIALIZED", "TK_MATERIALIZED"],
["NATURAL", "TK_JOIN_KW"],
["NO", "TK_NO"],
["NOT", "TK_NOT"],
["NOTHING", "TK_NOTHING"],
["NOTNULL", "TK_NOTNULL"],
["NULL", "TK_NULL"],
["NULLS", "TK_NULLS"],
["OF", "TK_OF"],
["OFFSET", "TK_OFFSET"],
["ON", "TK_ON"],
["OR", "TK_OR"],
["ORDER", "TK_ORDER"],
["OTHERS", "TK_OTHERS"],
["OUTER", "TK_JOIN_KW"],
["OVER", "TK_OVER"],
["PARTITION", "TK_PARTITION"],
["PLAN", "TK_PLAN"],
["PRAGMA", "TK_PRAGMA"],
["PRECEDING", "TK_PRECEDING"],
["PRIMARY", "TK_PRIMARY"],
["QUERY", "TK_QUERY"],
["RAISE", "TK_RAISE"],
["RANGE", "TK_RANGE"],
["RECURSIVE", "TK_RECURSIVE"],
["REFERENCES", "TK_REFERENCES"],
["REGEXP", "TK_LIKE_KW"],
["REINDEX", "TK_REINDEX"],
["RELEASE", "TK_RELEASE"],
["RENAME", "TK_RENAME"],
["REPLACE", "TK_REPLACE"],
["RETURNING", "TK_RETURNING"],
["RESTRICT", "TK_RESTRICT"],
["RIGHT", "TK_JOIN_KW"],
["ROLLBACK", "TK_ROLLBACK"],
["ROW", "TK_ROW"],
["ROWS", "TK_ROWS"],
["SAVEPOINT", "TK_SAVEPOINT"],
["SELECT", "TK_SELECT"],
["SET", "TK_SET"],
["TABLE", "TK_TABLE"],
["TEMP", "TK_TEMP"],
["TEMPORARY", "TK_TEMP"],
["THEN", "TK_THEN"],
["TIES", "TK_TIES"],
["TO", "TK_TO"],
["TRANSACTION", "TK_TRANSACTION"],
["TRIGGER", "TK_TRIGGER"],
["UNBOUNDED", "TK_UNBOUNDED"],
["UNION", "TK_UNION"],
["UNIQUE", "TK_UNIQUE"],
["UPDATE", "TK_UPDATE"],
["USING", "TK_USING"],
["VACUUM", "TK_VACUUM"],
["VALUES", "TK_VALUES"],
["VIEW", "TK_VIEW"],
["VIRTUAL", "TK_VIRTUAL"],
["WHEN", "TK_WHEN"],
["WHERE", "TK_WHERE"],
["WINDOW", "TK_WINDOW"],
["WITH", "TK_WITH"],
["WITHOUT", "TK_WITHOUT"],
],
)
.unwrap();
}

View File

@@ -30,7 +30,6 @@ mod uuid;
mod vdbe;
mod vector;
mod vtab;
pub mod parser;
#[cfg(feature = "fuzz")]
pub mod numeric;

34
parser/Cargo.toml Normal file
View File

@@ -0,0 +1,34 @@
[package]
name = "turso_parser"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
description = "The Turso parser library"
[lib]
name = "turso_parser"
[features]
default = []
serde = ["dep:serde", "bitflags/serde"]
[dependencies]
bitflags = "2.0"
miette = "7.4.0"
strum = { workspace = true }
strum_macros = {workspace = true }
serde = { workspace = true , optional = true, features = ["derive"] }
[dev-dependencies]
fallible-iterator = "0.3"
criterion = { version = "0.5", features = ["html_reports" ] }
turso_sqlite3_parser = { workspace = true }
[target.'cfg(not(target_family = "windows"))'.dev-dependencies]
pprof = { version = "0.14.0", features = ["criterion", "flamegraph"] }
[[bench]]
name = "parser_benchmark"
harness = false

View File

@@ -1,7 +1,7 @@
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use fallible_iterator::FallibleIterator;
use pprof::criterion::{Output, PProfProfiler};
use turso_core::parser::{lexer::Lexer, parser::Parser};
use turso_parser::{lexer::Lexer, parser::Parser};
use turso_sqlite3_parser::lexer::{
sql::{Parser as OldParser, Tokenizer},
Scanner,

290
parser/build.rs Normal file
View File

@@ -0,0 +1,290 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::PathBuf;
/// Generates a trie-like function with nested match expressions for parsing SQL keywords.
/// example: input: [["ABORT", "TK_ABORT"], ["ACTION", "TK_ACTION"], ["ADD", "TK_ADD"],]
/// A
/// ├─ B
/// │ ├─ O
/// │ │ ├─ R
/// │ │ │ ├─ T -> TK_ABORT
/// ├─ C
/// │ ├─ T
/// │ │ ├─ I
/// │ │ │ ├─ O
/// │ │ │ │ ├─ N -> TK_ACTION
/// ├─ D
/// │ ├─ D -> TK_ADD
///
/// Writes to `writer` a `pub fn {func_name}(buf: &[u8]) -> Option<TokenType>`
/// that matches ASCII-case-insensitively, plus `MIN_KEYWORD_LEN` /
/// `MAX_KEYWORD_LEN` consts used as a fast length pre-check.
/// `keywords` is a list of `[keyword, TokenType variant name]` pairs and must
/// be non-empty with no duplicate keywords.
fn build_keyword_map(
    writer: &mut impl Write,
    func_name: &str,
    keywords: &[[&'static str; 2]],
) -> Result<(), std::io::Error> {
    assert!(!keywords.is_empty());
    // Length bounds let the generated function reject non-keywords cheaply.
    let min_len = keywords.iter().map(|k| k[0].len()).min().unwrap();
    let max_len = keywords.iter().map(|k| k[0].len()).max().unwrap();

    // One trie node: an optional terminal token plus child edges keyed by the
    // uppercased byte.
    struct PathEntry {
        result: Option<&'static str>,
        sub_entries: HashMap<u8, Box<PathEntry>>,
    }

    // Build the trie from the keyword table.
    let mut paths = Box::new(PathEntry {
        result: None,
        sub_entries: HashMap::new(),
    });
    for keyword in keywords {
        let mut current = &mut paths;
        for &b in keyword[0].as_bytes() {
            // Matching is case-insensitive; normalize every edge to uppercase.
            current = current
                .sub_entries
                .entry(b.to_ascii_uppercase())
                .or_insert_with(|| {
                    Box::new(PathEntry {
                        result: None,
                        sub_entries: HashMap::new(),
                    })
                });
        }
        // A duplicate keyword would silently shadow a previous entry.
        assert!(
            current.result.is_none(),
            "duplicate keyword: {}",
            keyword[0]
        );
        current.result = Some(keyword[1]);
    }

    // Recursively emits the nested `match` for one trie node. `idx` in the
    // generated code is the current position in `buf`.
    fn write_entry(writer: &mut impl Write, entry: &PathEntry) -> Result<(), std::io::Error> {
        if let Some(result) = entry.result {
            // Input exhausted exactly on a terminal node: keyword recognized.
            writeln!(writer, "if idx == buf.len() {{")?;
            writeln!(writer, "return Some(TokenType::{result});")?;
            writeln!(writer, "}}")?;
        }
        if entry.sub_entries.is_empty() {
            writeln!(writer, "None")?;
            return Ok(());
        }
        writeln!(writer, "if idx >= buf.len() {{")?;
        writeln!(writer, "return None;")?;
        writeln!(writer, "}}")?;
        writeln!(writer, "match buf[idx] {{")?;
        for (&b, sub_entry) in &entry.sub_entries {
            // Letters accept both cases; other bytes (e.g. b'_') match exactly.
            if b.is_ascii_alphabetic() {
                writeln!(writer, "{} | {} => {{", b, b.to_ascii_lowercase())?;
            } else {
                writeln!(writer, "{b} => {{")?;
            }
            writeln!(writer, "idx += 1;")?;
            write_entry(writer, sub_entry)?;
            writeln!(writer, "}}")?;
        }
        writeln!(writer, "_ => None")?;
        writeln!(writer, "}}")?;
        Ok(())
    }

    // Emit the length-bound constants. NOTE: this previously emitted the
    // literal `70,772`, which is not valid Rust and ignored the computed
    // `max_len`; emit the real maximum instead.
    writeln!(
        writer,
        "pub(crate) const MAX_KEYWORD_LEN: usize = {max_len};"
    )?;
    writeln!(
        writer,
        "pub(crate) const MIN_KEYWORD_LEN: usize = {min_len};"
    )?;
    writeln!(writer, "/// Check if `word` is a keyword")?;
    writeln!(
        writer,
        "pub fn {func_name}(buf: &[u8]) -> Option<TokenType> {{"
    )?;
    writeln!(
        writer,
        "if buf.len() < MIN_KEYWORD_LEN || buf.len() > MAX_KEYWORD_LEN {{"
    )?;
    writeln!(writer, "return None;")?;
    writeln!(writer, "}}")?;
    writeln!(writer, "let mut idx = 0;")?;
    write_entry(writer, &paths)?;
    writeln!(writer, "}}")?;
    Ok(())
}
// Build-script entrypoint: generates `keywords.rs` in OUT_DIR, containing the
// `keyword_token` matcher produced by `build_keyword_map`. The crate pulls it
// in via `include!(concat!(env!("OUT_DIR"), "/keywords.rs"))`.
fn main() {
// OUT_DIR is always set by Cargo for build scripts.
let out_dir = PathBuf::from(std::env::var("OUT_DIR").unwrap());
let keywords = out_dir.join("keywords.rs");
// Buffered writer: the generator emits many small writeln! calls.
let mut keywords = BufWriter::new(File::create(keywords).unwrap());
// Keyword -> TokenType table. Entries must be unique (build_keyword_map
// asserts this); lookup in the generated code is ASCII-case-insensitive.
// Note several keywords share a token type (e.g. the join keywords map to
// TK_JOIN_KW, TEMP/TEMPORARY both map to TK_TEMP).
build_keyword_map(
&mut keywords,
"keyword_token",
&[
["ABORT", "TK_ABORT"],
["ACTION", "TK_ACTION"],
["ADD", "TK_ADD"],
["AFTER", "TK_AFTER"],
["ALL", "TK_ALL"],
["ALTER", "TK_ALTER"],
["ALWAYS", "TK_ALWAYS"],
["ANALYZE", "TK_ANALYZE"],
["AND", "TK_AND"],
["AS", "TK_AS"],
["ASC", "TK_ASC"],
["ATTACH", "TK_ATTACH"],
["AUTOINCREMENT", "TK_AUTOINCR"],
["BEFORE", "TK_BEFORE"],
["BEGIN", "TK_BEGIN"],
["BETWEEN", "TK_BETWEEN"],
["BY", "TK_BY"],
["CASCADE", "TK_CASCADE"],
["CASE", "TK_CASE"],
["CAST", "TK_CAST"],
["CHECK", "TK_CHECK"],
["COLLATE", "TK_COLLATE"],
["COLUMN", "TK_COLUMNKW"],
["COMMIT", "TK_COMMIT"],
["CONFLICT", "TK_CONFLICT"],
["CONSTRAINT", "TK_CONSTRAINT"],
["CREATE", "TK_CREATE"],
["CROSS", "TK_JOIN_KW"],
["CURRENT", "TK_CURRENT"],
["CURRENT_DATE", "TK_CTIME_KW"],
["CURRENT_TIME", "TK_CTIME_KW"],
["CURRENT_TIMESTAMP", "TK_CTIME_KW"],
["DATABASE", "TK_DATABASE"],
["DEFAULT", "TK_DEFAULT"],
["DEFERRABLE", "TK_DEFERRABLE"],
["DEFERRED", "TK_DEFERRED"],
["DELETE", "TK_DELETE"],
["DESC", "TK_DESC"],
["DETACH", "TK_DETACH"],
["DISTINCT", "TK_DISTINCT"],
["DO", "TK_DO"],
["DROP", "TK_DROP"],
["EACH", "TK_EACH"],
["ELSE", "TK_ELSE"],
["END", "TK_END"],
["ESCAPE", "TK_ESCAPE"],
["EXCEPT", "TK_EXCEPT"],
["EXCLUDE", "TK_EXCLUDE"],
["EXCLUSIVE", "TK_EXCLUSIVE"],
["EXISTS", "TK_EXISTS"],
["EXPLAIN", "TK_EXPLAIN"],
["FAIL", "TK_FAIL"],
["FILTER", "TK_FILTER"],
["FIRST", "TK_FIRST"],
["FOLLOWING", "TK_FOLLOWING"],
["FOR", "TK_FOR"],
["FOREIGN", "TK_FOREIGN"],
["FROM", "TK_FROM"],
["FULL", "TK_JOIN_KW"],
["GENERATED", "TK_GENERATED"],
["GLOB", "TK_LIKE_KW"],
["GROUP", "TK_GROUP"],
["GROUPS", "TK_GROUPS"],
["HAVING", "TK_HAVING"],
["IF", "TK_IF"],
["IGNORE", "TK_IGNORE"],
["IMMEDIATE", "TK_IMMEDIATE"],
["IN", "TK_IN"],
["INDEX", "TK_INDEX"],
["INDEXED", "TK_INDEXED"],
["INITIALLY", "TK_INITIALLY"],
["INNER", "TK_JOIN_KW"],
["INSERT", "TK_INSERT"],
["INSTEAD", "TK_INSTEAD"],
["INTERSECT", "TK_INTERSECT"],
["INTO", "TK_INTO"],
["IS", "TK_IS"],
["ISNULL", "TK_ISNULL"],
["JOIN", "TK_JOIN"],
["KEY", "TK_KEY"],
["LAST", "TK_LAST"],
["LEFT", "TK_JOIN_KW"],
["LIKE", "TK_LIKE_KW"],
["LIMIT", "TK_LIMIT"],
["MATCH", "TK_MATCH"],
["MATERIALIZED", "TK_MATERIALIZED"],
["NATURAL", "TK_JOIN_KW"],
["NO", "TK_NO"],
["NOT", "TK_NOT"],
["NOTHING", "TK_NOTHING"],
["NOTNULL", "TK_NOTNULL"],
["NULL", "TK_NULL"],
["NULLS", "TK_NULLS"],
["OF", "TK_OF"],
["OFFSET", "TK_OFFSET"],
["ON", "TK_ON"],
["OR", "TK_OR"],
["ORDER", "TK_ORDER"],
["OTHERS", "TK_OTHERS"],
["OUTER", "TK_JOIN_KW"],
["OVER", "TK_OVER"],
["PARTITION", "TK_PARTITION"],
["PLAN", "TK_PLAN"],
["PRAGMA", "TK_PRAGMA"],
["PRECEDING", "TK_PRECEDING"],
["PRIMARY", "TK_PRIMARY"],
["QUERY", "TK_QUERY"],
["RAISE", "TK_RAISE"],
["RANGE", "TK_RANGE"],
["RECURSIVE", "TK_RECURSIVE"],
["REFERENCES", "TK_REFERENCES"],
["REGEXP", "TK_LIKE_KW"],
["REINDEX", "TK_REINDEX"],
["RELEASE", "TK_RELEASE"],
["RENAME", "TK_RENAME"],
["REPLACE", "TK_REPLACE"],
["RETURNING", "TK_RETURNING"],
["RESTRICT", "TK_RESTRICT"],
["RIGHT", "TK_JOIN_KW"],
["ROLLBACK", "TK_ROLLBACK"],
["ROW", "TK_ROW"],
["ROWS", "TK_ROWS"],
["SAVEPOINT", "TK_SAVEPOINT"],
["SELECT", "TK_SELECT"],
["SET", "TK_SET"],
["TABLE", "TK_TABLE"],
["TEMP", "TK_TEMP"],
["TEMPORARY", "TK_TEMP"],
["THEN", "TK_THEN"],
["TIES", "TK_TIES"],
["TO", "TK_TO"],
["TRANSACTION", "TK_TRANSACTION"],
["TRIGGER", "TK_TRIGGER"],
["UNBOUNDED", "TK_UNBOUNDED"],
["UNION", "TK_UNION"],
["UNIQUE", "TK_UNIQUE"],
["UPDATE", "TK_UPDATE"],
["USING", "TK_USING"],
["VACUUM", "TK_VACUUM"],
["VALUES", "TK_VALUES"],
["VIEW", "TK_VIEW"],
["VIRTUAL", "TK_VIRTUAL"],
["WHEN", "TK_WHEN"],
["WHERE", "TK_WHERE"],
["WINDOW", "TK_WINDOW"],
["WITH", "TK_WITH"],
["WITHOUT", "TK_WITHOUT"],
],
)
.unwrap();
}

View File

@@ -1,7 +1,7 @@
use std::error;
use std::fmt;
use super::token::TokenType;
use crate::token::TokenType;
/// SQL lexer and parser errors
#[non_exhaustive]

View File

@@ -1,4 +1,4 @@
use crate::parser::{error::Error, token::TokenType};
use crate::{error::Error, token::TokenType};
include!(concat!(env!("OUT_DIR"), "/keywords.rs"));
@@ -134,7 +134,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_while_number_digit(&mut self) -> Result<(), Error> {
loop {
let start = self.offset;
@@ -164,7 +163,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_while_number_hexdigit(&mut self) -> Result<(), Error> {
loop {
let start = self.offset;
@@ -194,7 +192,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_token(&mut self, size: usize, typ: TokenType) -> Token<'a> {
debug_assert!(size > 0);
debug_assert!(self.remaining().len() >= size);
@@ -207,7 +204,6 @@ impl<'a> Lexer<'a> {
tok
}
#[inline(always)]
fn eat_white_space(&mut self) -> Token<'a> {
let start = self.offset;
self.eat_and_assert(|b| b.is_ascii_whitespace());
@@ -218,7 +214,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_minus_or_comment_or_ptr(&mut self) -> Token<'a> {
let start = self.offset;
self.eat_and_assert(|b| b == b'-');
@@ -254,7 +249,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_slash_or_comment(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
self.eat_and_assert(|b| b == b'/');
@@ -294,7 +288,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_eq(&mut self) -> Token<'a> {
let start = self.offset;
self.eat_and_assert(|b| b == b'=');
@@ -308,7 +301,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_le_or_ne_or_lshift_or_lt(&mut self) -> Token<'a> {
let start = self.offset;
self.eat_and_assert(|b| b == b'<');
@@ -341,7 +333,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_ge_or_gt_or_rshift(&mut self) -> Token<'a> {
let start = self.offset;
self.eat_and_assert(|b| b == b'>');
@@ -367,7 +358,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_ne(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
self.eat_and_assert(|b| b == b'!');
@@ -384,7 +374,6 @@ impl<'a> Lexer<'a> {
})
}
#[inline(always)]
fn eat_concat_or_bitor(&mut self) -> Token<'a> {
let start = self.offset;
self.eat_and_assert(|b| b == b'|');
@@ -402,7 +391,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_lit_or_id(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
let quote = self.eat().unwrap();
@@ -437,7 +425,6 @@ impl<'a> Lexer<'a> {
})
}
#[inline(always)]
fn eat_dot_or_frac(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
self.eat_and_assert(|b| b == b'.');
@@ -471,7 +458,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_expo(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
self.eat_and_assert(|b| b == b'e' || b == b'E');
@@ -502,7 +488,6 @@ impl<'a> Lexer<'a> {
})
}
#[inline(always)]
fn eat_number(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
let first_digit = self.eat().unwrap();
@@ -565,7 +550,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_bracket(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
self.eat_and_assert(|b| b == b'[');
@@ -583,7 +567,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_var(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
let tok = self.eat().unwrap();
@@ -621,7 +604,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_blob_or_id(&mut self) -> Result<Token<'a>, Error> {
let start = self.offset;
let start_char = self.eat().unwrap();
@@ -662,7 +644,6 @@ impl<'a> Lexer<'a> {
}
}
#[inline(always)]
fn eat_unrecognized(&mut self) -> Token<'a> {
let start = self.offset;
self.eat_while(|b| b.is_some() && !b.unwrap().is_ascii_whitespace());

View File

@@ -1,14 +1,14 @@
use crate::parser::ast::{
use crate::ast::{
As, Cmd, CommonTableExpr, CompoundOperator, CompoundSelect, CreateTableBody, Distinctness,
Expr, FrameBound, FrameClause, FrameExclude, FrameMode, FromClause, FunctionTail, GroupBy,
Indexed, IndexedColumn, JoinConstraint, JoinOperator, JoinType, JoinedSelectTable,
LikeOperator, Limit, Literal, Materialized, Name, NullsOrder, OneSelect, Operator, Over,
QualifiedName, ResultColumn, Select, SelectBody, SelectTable, SortOrder, SortedColumn, Stmt,
TransactionType, Type, TypeSize, UnaryOperator, Window, WindowDef, With,
QualifiedName, ResolveType, ResultColumn, Select, SelectBody, SelectTable, SortOrder,
SortedColumn, Stmt, TransactionType, Type, TypeSize, UnaryOperator, Window, WindowDef, With,
};
use crate::parser::error::Error;
use crate::parser::lexer::{Lexer, Token};
use crate::parser::token::TokenType;
use crate::error::Error;
use crate::lexer::{Lexer, Token};
use crate::token::TokenType;
#[inline(always)]
fn from_bytes_as_str(bytes: &[u8]) -> &str {
@@ -105,7 +105,6 @@ impl<'a> Parser<'a> {
}
// entrypoint of parsing
#[inline(always)]
fn next_cmd(&mut self) -> Result<Option<Cmd>, Error> {
// consumes prefix SEMI
while let Some(token) = self.peek()? {
@@ -184,7 +183,6 @@ impl<'a> Parser<'a> {
}
}
#[inline(always)]
fn next_token(&mut self) -> Result<Option<Token<'a>>, Error> {
debug_assert!(!self.peekable);
let mut next = self.consume_lexer_without_whitespaces_or_comments();
@@ -377,12 +375,12 @@ impl<'a> Parser<'a> {
}
#[inline(always)]
fn eat_assert(&mut self, expected: &'static [TokenType]) -> Token<'a> {
fn eat_assert(&mut self, _expected: &'static [TokenType]) -> Token<'a> {
let token = self.eat_no_eof().unwrap();
#[cfg(debug_assertions)]
{
for expected in expected {
for expected in _expected {
if token.token_type == Some(*expected) {
return token;
}
@@ -396,7 +394,7 @@ impl<'a> Parser<'a> {
panic!(
"Expected token {:?}, got {:?}",
expected,
_expected,
token.token_type.unwrap()
);
}
@@ -461,7 +459,6 @@ impl<'a> Parser<'a> {
}
}
#[inline(always)]
fn peek_nm(&mut self) -> Result<Token<'a>, Error> {
self.peek_expect(&[
TokenType::TK_ID,
@@ -471,7 +468,6 @@ impl<'a> Parser<'a> {
])
}
#[inline(always)]
fn parse_nm(&mut self) -> Name {
let tok = self.eat_assert(&[
TokenType::TK_ID,
@@ -782,7 +778,6 @@ impl<'a> Parser<'a> {
///
/// this function detect precedence by peeking first token of operator
/// after parsing a operand (binary operator)
fn current_token_precedence(&mut self) -> Result<Option<u8>, Error> {
let tok = self.peek()?;
if tok.is_none() {
@@ -1050,13 +1045,12 @@ impl<'a> Parser<'a> {
})
}
#[inline(always)] // this function is hot :)
fn parse_expr_operand(&mut self) -> Result<Box<Expr>, Error> {
let tok = self.peek_expect(&[
TokenType::TK_LP,
TokenType::TK_CAST,
TokenType::TK_CTIME_KW,
TokenType::TK_RAISE,
TokenType::TK_ID,
TokenType::TK_STRING,
TokenType::TK_INDEXED,
@@ -1077,9 +1071,18 @@ impl<'a> Parser<'a> {
match tok.token_type.unwrap() {
TokenType::TK_LP => {
self.eat_assert(&[TokenType::TK_LP]);
let exprs = self.parse_nexpr_list()?;
self.eat_expect(&[TokenType::TK_RP])?;
Ok(Box::new(Expr::Parenthesized(exprs)))
match self.peek_no_eof()?.token_type.unwrap() {
TokenType::TK_WITH | TokenType::TK_SELECT | TokenType::TK_VALUES => {
let select = self.parse_select()?;
self.eat_expect(&[TokenType::TK_RP])?;
Ok(Box::new(Expr::Subquery(select)))
}
_ => {
let exprs = self.parse_nexpr_list()?;
self.eat_expect(&[TokenType::TK_RP])?;
Ok(Box::new(Expr::Parenthesized(exprs)))
}
}
}
TokenType::TK_NULL => {
self.eat_assert(&[TokenType::TK_NULL]);
@@ -1166,7 +1169,11 @@ impl<'a> Parser<'a> {
None
};
let mut when_then_pairs = vec![];
self.eat_expect(&[TokenType::TK_WHEN])?;
let first_when = self.parse_expr(0)?;
self.eat_expect(&[TokenType::TK_THEN])?;
let mut when_then_pairs = vec![(first_when, self.parse_expr(0)?)];
loop {
if let Some(tok) = self.peek()? {
if tok.token_type.unwrap() != TokenType::TK_WHEN {
@@ -1194,12 +1201,41 @@ impl<'a> Parser<'a> {
None
};
self.eat_expect(&[TokenType::TK_END])?;
Ok(Box::new(Expr::Case {
base,
when_then_pairs,
else_expr,
}))
}
TokenType::TK_RAISE => {
self.eat_assert(&[TokenType::TK_RAISE]);
self.eat_expect(&[TokenType::TK_LP])?;
let tok = self.eat_expect(&[
TokenType::TK_IGNORE,
TokenType::TK_ROLLBACK,
TokenType::TK_ABORT,
TokenType::TK_FAIL,
])?;
let resolve = match tok.token_type.unwrap() {
TokenType::TK_IGNORE => ResolveType::Ignore,
TokenType::TK_ROLLBACK => ResolveType::Rollback,
TokenType::TK_ABORT => ResolveType::Abort,
TokenType::TK_FAIL => ResolveType::Fail,
_ => unreachable!(),
};
let expr = if resolve != ResolveType::Ignore {
self.eat_expect(&[TokenType::TK_COMMA])?;
Some(self.parse_expr(0)?)
} else {
None
};
Ok(Box::new(Expr::Raise(resolve, expr)))
}
_ => {
let can_be_lit_str = tok.token_type == Some(TokenType::TK_STRING);
debug_assert!(self.peek_nm().is_ok(), "Expected a name token");
@@ -2604,7 +2640,7 @@ mod tests {
name: Name::Ident("ABORT".to_string()),
})],
),
// test exprs
// test expr operand
(
b"SELECT 1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
@@ -2627,6 +2663,577 @@ mod tests {
limit: None,
}))],
),
(
b"SELECT (1)".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Parenthesized(vec![Box::new(Expr::Literal(
Literal::Numeric("1".to_owned()),
))])),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT NULL".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Literal(Literal::Null)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT X'ab'".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Literal(Literal::Blob("ab".to_owned()))),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT 3.333".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Literal(Literal::Numeric("3.333".to_owned()))),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT ?1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Variable("1".to_owned())),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CAST(1 AS INTEGER)".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Cast {
expr: Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
type_name: Some(Type {
name: "INTEGER".to_owned(),
size: None,
}),
}),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CAST(1 AS VARCHAR(255))".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Cast {
expr: Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
type_name: Some(Type {
name: "VARCHAR".to_owned(),
size: Some(TypeSize::MaxSize(Box::new(Expr::Literal(
Literal::Numeric("255".to_owned()),
)))),
}),
}),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CAST(1 AS DECIMAL(10, 5))".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Cast {
expr: Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
type_name: Some(Type {
name: "DECIMAL".to_owned(),
size: Some(TypeSize::TypeSize(
Box::new(Expr::Literal(Literal::Numeric(
"10".to_owned(),
))),
Box::new(Expr::Literal(Literal::Numeric(
"5".to_owned(),
))),
)),
}),
}),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CURRENT_DATE".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Literal(Literal::CurrentDate)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CURRENT_TIME".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Literal(Literal::CurrentTime)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CURRENT_TIMESTAMP".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Literal(Literal::CurrentTimestamp)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT NOT 1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Unary(
UnaryOperator::Not,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT NOT 1 + 1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Unary(
UnaryOperator::Not,
Box::new(Expr::Binary(
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
Operator::Add,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT ~1 + 1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Binary(
Box::new(Expr::Unary(
UnaryOperator::BitwiseNot,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
Operator::Add,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT +1 + 1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Binary(
Box::new(Expr::Unary(
UnaryOperator::Positive,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
Operator::Add,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT -1 + 1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Binary(
Box::new(Expr::Unary(
UnaryOperator::Negative,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
Operator::Add,
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
)),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT EXISTS (SELECT 1)".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Exists(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Literal(Literal::Numeric(
"1".to_owned(),
))),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
})),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CASE WHEN 1 THEN 2 ELSE 3 END".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Case {
base: None,
when_then_pairs: vec![(
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
Box::new(Expr::Literal(Literal::Numeric("2".to_owned()))),
)],
else_expr: Some(Box::new(Expr::Literal(Literal::Numeric(
"3".to_owned(),
)))),
}),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CASE 4 WHEN 1 THEN 2 ELSE 3 END".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Case {
base: Some(Box::new(Expr::Literal(Literal::Numeric(
"4".to_owned(),
)))),
when_then_pairs: vec![(
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
Box::new(Expr::Literal(Literal::Numeric("2".to_owned()))),
)],
else_expr: Some(Box::new(Expr::Literal(Literal::Numeric(
"3".to_owned(),
)))),
}),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT CASE 4 WHEN 1 THEN 2 END".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Case {
base: Some(Box::new(Expr::Literal(Literal::Numeric(
"4".to_owned(),
)))),
when_then_pairs: vec![(
Box::new(Expr::Literal(Literal::Numeric("1".to_owned()))),
Box::new(Expr::Literal(Literal::Numeric("2".to_owned()))),
)],
else_expr: None,
}),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
(
b"SELECT col_1".as_slice(),
vec![Cmd::Stmt(Stmt::Select(Select {
with: None,
body: SelectBody {
select: OneSelect::Select {
distinctness: None,
columns: vec![ResultColumn::Expr(
Box::new(Expr::Column("col_1".to_owned())),
None,
)],
from: None,
where_clause: None,
group_by: None,
window_clause: vec![],
},
compounds: vec![],
},
order_by: vec![],
limit: None,
}))],
),
];
for (input, expected) in test_cases {