Merge 'feat(core/translate): support HAVING' from Jussi Saurio
Support the HAVING clause. Note that SQLite (and, I think, standard SQL) supports HAVING even without GROUP BY, but `sqlite3-parser` doesn't. This also fixes some issues with the PartialOrd implementation of OwnedValue and with the implementations of `concat` and `round`, which I discovered because my HAVING TCL tests were failing.

Closes #420
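The mechanics, in brief: the planner now carries the HAVING clause on the GROUP BY plan node as a list of predicates split at AND boundaries, and the emitter evaluates them once per finished group, skipping the result row when any predicate fails. A minimal standalone sketch of that shape (toy types and data, not Limbo's planner or emitter):

```rust
// Toy illustration only: HAVING modeled as a conjunction of predicates
// applied to each finished group, mirroring the new
// `GroupBy { exprs, having: Option<Vec<Expr>> }` split at AND boundaries.
use std::collections::BTreeMap;

#[derive(Debug)]
struct Group {
    count: i64,
    sum_age: f64,
}

fn main() {
    let rows = [("Nina", 100.0), ("Kurt", 99.0), ("Nina", 100.0), ("Bob", 20.0)];

    // GROUP BY first_name: accumulate aggregates per group key.
    let mut groups: BTreeMap<&str, Group> = BTreeMap::new();
    for (name, age) in rows {
        let g = groups.entry(name).or_insert(Group { count: 0, sum_age: 0.0 });
        g.count += 1;
        g.sum_age += age;
    }

    // HAVING avg(age) > 97 AND count(*) > 1, expressed as a Vec of predicates
    // that must all hold (the AND-split form the planner stores).
    let having: Vec<Box<dyn Fn(&Group) -> bool>> = vec![
        Box::new(|g| g.sum_age / g.count as f64 > 97.0),
        Box::new(|g| g.count > 1),
    ];

    for (name, g) in &groups {
        // A group that fails any predicate emits no result row
        // (the generated program jumps past the ResultRow).
        if having.iter().all(|p| p(g)) {
            println!("{name}: count={}, avg={}", g.count, g.sum_age / g.count as f64);
        }
    }
}
```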
COMPAT.md
@@ -2,11 +2,16 @@
 
 This document describes the SQLite compatibility status of Limbo:
 
-* [Limitations](#limitations)
-* [SQL statements](#sql-statements)
-* [SQL functions](#sql-functions)
-* [SQLite API](#sqlite-api)
-* [SQLite VDBE opcodes](#sqlite-vdbe-opcodes)
+- [SQLite Compatibility](#sqlite-compatibility)
+  - [Limitations](#limitations)
+  - [SQL statements](#sql-statements)
+  - [SQL functions](#sql-functions)
+    - [Scalar functions](#scalar-functions)
+    - [Aggregate functions](#aggregate-functions)
+    - [Date and time functions](#date-and-time-functions)
+    - [JSON functions](#json-functions)
+  - [SQLite API](#sqlite-api)
+  - [SQLite VDBE opcodes](#sqlite-vdbe-opcodes)
 
 ## Limitations
 
@@ -51,6 +56,7 @@ This document describes the SQLite compatibility status of Limbo:
 | SELECT ... LIMIT | Yes | |
 | SELECT ... ORDER BY | Partial | |
 | SELECT ... GROUP BY | Partial | |
+| SELECT ... HAVING | Partial | |
 | SELECT ... JOIN | Partial | |
 | SELECT ... CROSS JOIN | Partial | |
 | SELECT ... INNER JOIN | Partial | |
@@ -20,7 +20,7 @@ use super::expr::{
     ConditionMetadata,
 };
 use super::optimizer::Optimizable;
-use super::plan::{Aggregate, BTreeTableReference, Direction, Plan};
+use super::plan::{Aggregate, BTreeTableReference, Direction, GroupBy, Plan};
 use super::plan::{ResultSetColumn, SourceOperator};
 
 // Metadata for handling LEFT JOIN operations
@@ -282,7 +282,7 @@ fn init_order_by(
 /// Initialize resources needed for GROUP BY processing
 fn init_group_by(
     program: &mut ProgramBuilder,
-    group_by: &Vec<ast::Expr>,
+    group_by: &GroupBy,
     aggregates: &Vec<Aggregate>,
     metadata: &mut Metadata,
 ) -> Result<()> {
@@ -294,8 +294,8 @@ fn init_group_by(
 
     let abort_flag_register = program.alloc_register();
     let data_in_accumulator_indicator_register = program.alloc_register();
-    let group_exprs_comparison_register = program.alloc_registers(group_by.len());
-    let group_exprs_accumulator_register = program.alloc_registers(group_by.len());
+    let group_exprs_comparison_register = program.alloc_registers(group_by.exprs.len());
+    let group_exprs_accumulator_register = program.alloc_registers(group_by.exprs.len());
     let agg_exprs_start_reg = program.alloc_registers(num_aggs);
     let sorter_key_register = program.alloc_register();
 
@@ -304,12 +304,12 @@ fn init_group_by(
 
     let mut order = Vec::new();
     const ASCENDING: i64 = 0;
-    for _ in group_by.iter() {
+    for _ in group_by.exprs.iter() {
         order.push(OwnedValue::Integer(ASCENDING));
     }
     program.emit_insn(Insn::SorterOpen {
         cursor_id: sort_cursor,
-        columns: aggregates.len() + group_by.len(),
+        columns: aggregates.len() + group_by.exprs.len(),
         order: OwnedRecord::new(order),
     });
 
@@ -325,8 +325,8 @@
     );
     program.emit_insn(Insn::Null {
         dest: group_exprs_comparison_register,
-        dest_end: if group_by.len() > 1 {
-            Some(group_exprs_comparison_register + group_by.len() - 1)
+        dest_end: if group_by.exprs.len() > 1 {
+            Some(group_exprs_comparison_register + group_by.exprs.len() - 1)
         } else {
             None
         },
@@ -778,7 +778,7 @@ fn open_loop(
 /// - a ResultRow (there is none of the above, so the loop emits a result row directly)
 pub enum InnerLoopEmitTarget<'a> {
     GroupBySorter {
-        group_by: &'a Vec<ast::Expr>,
+        group_by: &'a GroupBy,
         aggregates: &'a Vec<Aggregate>,
     },
     OrderBySorter {
@@ -874,7 +874,7 @@ fn inner_loop_source_emit(
             group_by,
             aggregates,
         } => {
-            let sort_keys_count = group_by.len();
+            let sort_keys_count = group_by.exprs.len();
             let aggregate_arguments_count =
                 aggregates.iter().map(|agg| agg.args.len()).sum::<usize>();
             let column_count = sort_keys_count + aggregate_arguments_count;
@@ -882,7 +882,7 @@ fn inner_loop_source_emit(
             let mut cur_reg = start_reg;
 
             // The group by sorter rows will contain the grouping keys first. They are also the sort keys.
-            for expr in group_by.iter() {
+            for expr in group_by.exprs.iter() {
                 let key_reg = cur_reg;
                 cur_reg += 1;
                 translate_expr(program, Some(referenced_tables), expr, key_reg, None)?;
@@ -1124,7 +1124,7 @@ fn close_loop(
 fn group_by_emit(
     program: &mut ProgramBuilder,
     result_columns: &Vec<ResultSetColumn>,
-    group_by: &Vec<ast::Expr>,
+    group_by: &GroupBy,
     order_by: Option<&Vec<(ast::Expr, Direction)>>,
     aggregates: &Vec<Aggregate>,
     limit: Option<usize>,
@@ -1153,7 +1153,7 @@ fn group_by_emit(
     // all group by columns and all arguments of agg functions are in the sorter.
     // the sort keys are the group by columns (the aggregation within groups is done based on how long the sort keys remain the same)
     let sorter_column_count =
-        group_by.len() + aggregates.iter().map(|agg| agg.args.len()).sum::<usize>();
+        group_by.exprs.len() + aggregates.iter().map(|agg| agg.args.len()).sum::<usize>();
     // sorter column names do not matter
     let pseudo_columns = (0..sorter_column_count)
         .map(|i| Column {
@@ -1194,8 +1194,8 @@ fn group_by_emit(
     });
 
     // Read the group by columns from the pseudo cursor
-    let groups_start_reg = program.alloc_registers(group_by.len());
-    for i in 0..group_by.len() {
+    let groups_start_reg = program.alloc_registers(group_by.exprs.len());
+    for i in 0..group_by.exprs.len() {
         let sorter_column_index = i;
         let group_reg = groups_start_reg + i;
         program.emit_insn(Insn::Column {
@@ -1209,7 +1209,7 @@ fn group_by_emit(
     program.emit_insn(Insn::Compare {
         start_reg_a: comparison_register,
         start_reg_b: groups_start_reg,
-        count: group_by.len(),
+        count: group_by.exprs.len(),
     });
 
     let agg_step_label = program.allocate_label();
@@ -1232,7 +1232,7 @@ fn group_by_emit(
     program.emit_insn(Insn::Move {
         source_reg: groups_start_reg,
         dest_reg: comparison_register,
-        count: group_by.len(),
+        count: group_by.exprs.len(),
     });
 
     program.add_comment(
@@ -1269,7 +1269,7 @@ fn group_by_emit(
     // Accumulate the values into the aggregations
     program.resolve_label(agg_step_label, program.offset());
     let start_reg = metadata.aggregation_start_register.unwrap();
-    let mut cursor_index = group_by.len();
+    let mut cursor_index = group_by.exprs.len();
     for (i, agg) in aggregates.iter().enumerate() {
         let agg_result_reg = start_reg + i;
         translate_aggregation_groupby(
@@ -1298,7 +1298,7 @@ fn group_by_emit(
     );
 
     // Read the group by columns for a finished group
-    for i in 0..group_by.len() {
+    for i in 0..group_by.exprs.len() {
         let key_reg = group_exprs_start_register + i;
         let sorter_column_index = i;
         program.emit_insn(Insn::Column {
@@ -1366,6 +1366,11 @@ fn group_by_emit(
         },
         termination_label,
     );
+    let group_by_end_without_emitting_row_label = program.allocate_label();
+    program.defer_label_resolution(
+        group_by_end_without_emitting_row_label,
+        program.offset() as usize,
+    );
     program.emit_insn(Insn::Return {
         return_reg: group_by_metadata.subroutine_accumulator_output_return_offset_register,
     });
@@ -1387,14 +1392,31 @@ fn group_by_emit(
     // and the agg results in (agg_start_reg..agg_start_reg + aggregates.len() - 1)
     // we need to call translate_expr on each result column, but replace the expr with a register copy in case any part of the
     // result column expression matches a) a group by column or b) an aggregation result.
-    let mut precomputed_exprs_to_register = Vec::with_capacity(aggregates.len() + group_by.len());
-    for (i, expr) in group_by.iter().enumerate() {
+    let mut precomputed_exprs_to_register =
+        Vec::with_capacity(aggregates.len() + group_by.exprs.len());
+    for (i, expr) in group_by.exprs.iter().enumerate() {
         precomputed_exprs_to_register.push((expr, group_exprs_start_register + i));
     }
     for (i, agg) in aggregates.iter().enumerate() {
         precomputed_exprs_to_register.push((&agg.original_expr, agg_start_reg + i));
     }
 
+    if let Some(having) = &group_by.having {
+        for expr in having.iter() {
+            translate_condition_expr(
+                program,
+                referenced_tables,
+                expr,
+                ConditionMetadata {
+                    jump_if_condition_is_true: false,
+                    jump_target_when_false: group_by_end_without_emitting_row_label,
+                    jump_target_when_true: i64::MAX, // unused
+                },
+                Some(&precomputed_exprs_to_register),
+            )?;
+        }
+    }
+
     match order_by {
         None => {
             emit_select_result(
@@ -1430,7 +1452,7 @@ fn group_by_emit(
     let start_reg = group_by_metadata.group_exprs_accumulator_register;
     program.emit_insn(Insn::Null {
         dest: start_reg,
-        dest_end: Some(start_reg + group_by.len() + aggregates.len() - 1),
+        dest_end: Some(start_reg + group_by.exprs.len() + aggregates.len() - 1),
     });
 
     program.emit_insn(Insn::Integer {
@@ -871,7 +871,6 @@ pub fn translate_expr(
             for arg in args.iter() {
                 let reg = program.alloc_register();
                 start_reg = Some(start_reg.unwrap_or(reg));
-
                 translate_expr(
                     program,
                     referenced_tables,
@@ -19,6 +19,13 @@ pub struct ResultSetColumn {
     pub contains_aggregates: bool,
 }
 
+#[derive(Debug)]
+pub struct GroupBy {
+    pub exprs: Vec<ast::Expr>,
+    /// having clause split into a vec at 'AND' boundaries.
+    pub having: Option<Vec<ast::Expr>>,
+}
+
 #[derive(Debug)]
 pub struct Plan {
     /// A tree of sources (tables).
@@ -28,7 +35,7 @@ pub struct Plan {
     /// where clause split into a vec at 'AND' boundaries.
     pub where_clause: Option<Vec<ast::Expr>>,
     /// group by clause
-    pub group_by: Option<Vec<ast::Expr>>,
+    pub group_by: Option<GroupBy>,
     /// order by clause
     pub order_by: Option<Vec<(ast::Expr, Direction)>>,
     /// all the aggregates collected from the result columns, order by, and (TODO) having clauses
@@ -1,5 +1,5 @@
 use super::plan::{
-    Aggregate, BTreeTableReference, Direction, Plan, ResultSetColumn, SourceOperator,
+    Aggregate, BTreeTableReference, Direction, GroupBy, Plan, ResultSetColumn, SourceOperator,
 };
 use crate::{function::Func, schema::Schema, util::normalize_ident, Result};
 use sqlite3_parser::ast::{self, FromClause, JoinType, ResultColumn};
@@ -19,9 +19,9 @@ impl OperatorIdCounter {
     }
 }
 
-fn resolve_aggregates(expr: &ast::Expr, aggs: &mut Vec<Aggregate>) {
+fn resolve_aggregates(expr: &ast::Expr, aggs: &mut Vec<Aggregate>) -> bool {
     if aggs.iter().any(|a| a.original_expr == *expr) {
-        return;
+        return true;
     }
     match expr {
         ast::Expr::FunctionCall { name, args, .. } => {
@@ -31,17 +31,22 @@ fn resolve_aggregates(expr: &ast::Expr, aggs: &mut Vec<Aggregate>) {
                 0
             };
             match Func::resolve_function(normalize_ident(name.0.as_str()).as_str(), args_count) {
-                Ok(Func::Agg(f)) => aggs.push(Aggregate {
-                    func: f,
-                    args: args.clone().unwrap_or_default(),
-                    original_expr: expr.clone(),
-                }),
+                Ok(Func::Agg(f)) => {
+                    aggs.push(Aggregate {
+                        func: f,
+                        args: args.clone().unwrap_or_default(),
+                        original_expr: expr.clone(),
+                    });
+                    true
+                }
                 _ => {
+                    let mut contains_aggregates = false;
                     if let Some(args) = args {
                         for arg in args.iter() {
-                            resolve_aggregates(arg, aggs);
+                            contains_aggregates |= resolve_aggregates(arg, aggs);
                         }
                     }
+                    contains_aggregates
                 }
             }
         }
@@ -53,15 +58,20 @@ fn resolve_aggregates(expr: &ast::Expr, aggs: &mut Vec<Aggregate>) {
                     func: f,
                     args: vec![],
                     original_expr: expr.clone(),
-                })
+                });
+                true
+            } else {
+                false
             }
         }
         ast::Expr::Binary(lhs, _, rhs) => {
-            resolve_aggregates(lhs, aggs);
-            resolve_aggregates(rhs, aggs);
+            let mut contains_aggregates = false;
+            contains_aggregates |= resolve_aggregates(lhs, aggs);
+            contains_aggregates |= resolve_aggregates(rhs, aggs);
+            contains_aggregates
         }
         // TODO: handle other expressions that may contain aggregates
-        _ => {}
+        _ => false,
     }
 }
 
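Since resolve_aggregates now reports whether the expression contains an aggregate, callers no longer have to compare the length of the aggregate list before and after the call, and HAVING predicates can reuse the same walk. A rough standalone sketch of the same recursive pattern over a toy expression type (assumed names, not the sqlite3_parser AST):

```rust
// Toy expression tree; the real code walks sqlite3_parser's ast::Expr.
#[derive(Clone, Debug, PartialEq)]
enum Expr {
    Column(String),
    Literal(i64),
    FunctionCall { name: String, args: Vec<Expr> },
    Binary(Box<Expr>, Box<Expr>),
}

fn is_aggregate_fn(name: &str) -> bool {
    matches!(name, "count" | "sum" | "avg" | "min" | "max")
}

/// Collects aggregate sub-expressions into `aggs` and returns true if `expr`
/// contains at least one aggregate anywhere in its tree.
fn resolve_aggregates(expr: &Expr, aggs: &mut Vec<Expr>) -> bool {
    match expr {
        Expr::FunctionCall { name, .. } if is_aggregate_fn(name) => {
            if !aggs.contains(expr) {
                aggs.push(expr.clone());
            }
            true
        }
        Expr::FunctionCall { args, .. } => {
            let mut contains = false;
            for arg in args {
                contains |= resolve_aggregates(arg, aggs);
            }
            contains
        }
        Expr::Binary(lhs, rhs) => {
            // `|=` keeps walking both sides even if the left already matched.
            let mut contains = false;
            contains |= resolve_aggregates(lhs, aggs);
            contains |= resolve_aggregates(rhs, aggs);
            contains
        }
        _ => false,
    }
}

fn main() {
    let mut aggs = Vec::new();
    // sum(age) + 1000, as in the having_with_binary_cond test further down.
    let expr = Expr::Binary(
        Box::new(Expr::FunctionCall {
            name: "sum".into(),
            args: vec![Expr::Column("age".into())],
        }),
        Box::new(Expr::Literal(1000)),
    );
    assert!(resolve_aggregates(&expr, &mut aggs));
    assert_eq!(aggs.len(), 1);
}
```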
@@ -340,10 +350,8 @@ pub fn prepare_select_plan<'a>(schema: &Schema, select: ast::Select) -> Result<P
                     });
                 }
                 Ok(_) => {
-                    let cur_agg_count = aggregate_expressions.len();
-                    resolve_aggregates(&expr, &mut aggregate_expressions);
                     let contains_aggregates =
-                        cur_agg_count != aggregate_expressions.len();
+                        resolve_aggregates(&expr, &mut aggregate_expressions);
                     plan.result_columns.push(ResultSetColumn {
                         expr: expr.clone(),
                         contains_aggregates,
@@ -380,10 +388,8 @@ pub fn prepare_select_plan<'a>(schema: &Schema, select: ast::Select) -> Result<P
                     }
                 }
                 expr => {
-                    let cur_agg_count = aggregate_expressions.len();
-                    resolve_aggregates(expr, &mut aggregate_expressions);
                     let contains_aggregates =
-                        cur_agg_count != aggregate_expressions.len();
+                        resolve_aggregates(expr, &mut aggregate_expressions);
                     plan.result_columns.push(ResultSetColumn {
                         expr: expr.clone(),
                         contains_aggregates,
@@ -393,18 +399,37 @@ pub fn prepare_select_plan<'a>(schema: &Schema, select: ast::Select) -> Result<P
             }
         }
     }
-    if let Some(group_by) = group_by.as_mut() {
+    if let Some(mut group_by) = group_by {
         for expr in group_by.exprs.iter_mut() {
             bind_column_references(expr, &plan.referenced_tables)?;
         }
-        if aggregate_expressions.is_empty() {
-            crate::bail_parse_error!(
-                "GROUP BY clause without aggregate functions is not allowed"
-            );
-        }
+
+        plan.group_by = Some(GroupBy {
+            exprs: group_by.exprs,
+            having: if let Some(having) = group_by.having {
+                let mut predicates = vec![];
+                break_predicate_at_and_boundaries(having, &mut predicates);
+                for expr in predicates.iter_mut() {
+                    bind_column_references(expr, &plan.referenced_tables)?;
+                    let contains_aggregates =
+                        resolve_aggregates(expr, &mut aggregate_expressions);
+                    if !contains_aggregates {
+                        // TODO: sqlite allows HAVING clauses with non aggregate expressions like
+                        // HAVING id = 5. We should support this too eventually (I guess).
+                        // sqlite3-parser does not support HAVING without group by though, so we'll
+                        // need to either make a PR or add it to our vendored version.
+                        crate::bail_parse_error!(
+                            "HAVING clause must contain an aggregate function"
+                        );
+                    }
+                }
+                Some(predicates)
+            } else {
+                None
+            },
+        });
     }
 
-    plan.group_by = group_by.map(|g| g.exprs);
     plan.aggregates = if aggregate_expressions.is_empty() {
         None
     } else {
@@ -124,7 +124,9 @@ impl PartialOrd<OwnedValue> for OwnedValue {
             (OwnedValue::Null, _) => Some(std::cmp::Ordering::Less),
             (_, OwnedValue::Null) => Some(std::cmp::Ordering::Greater),
             (OwnedValue::Agg(a), OwnedValue::Agg(b)) => a.partial_cmp(b),
-            _ => None,
+            (OwnedValue::Agg(a), other) => a.final_value().partial_cmp(other),
+            (other, OwnedValue::Agg(b)) => other.partial_cmp(b.final_value()),
+            other => todo!("{:?}", other),
         }
     }
 }
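This PartialOrd fix is what lets a HAVING predicate like avg(u.age) > 97 work: the comparison pairs an aggregate register with a plain integer, and that pair previously fell through to None. A simplified sketch of comparing through the finalized value (stand-in types, not Limbo's OwnedValue):

```rust
// Toy sketch: an aggregate accumulator compares through its finalized value,
// so `avg(age) > 97` can be evaluated inside HAVING.
use std::cmp::Ordering;

#[derive(Debug)]
enum Value {
    Integer(i64),
    Float(f64),
    // Pretend the accumulator already knows its final value.
    Agg(f64),
}

impl Value {
    fn final_value(&self) -> f64 {
        match self {
            Value::Integer(i) => *i as f64,
            Value::Float(f) => *f,
            Value::Agg(f) => *f,
        }
    }
}

impl PartialEq for Value {
    fn eq(&self, other: &Self) -> bool {
        self.final_value() == other.final_value()
    }
}

impl PartialOrd for Value {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Before the fix, an (Agg, non-Agg) pair fell through to `None`,
        // which made comparisons like `avg(age) > 97` always false.
        self.final_value().partial_cmp(&other.final_value())
    }
}

fn main() {
    let avg_age = Value::Agg(99.0);
    let threshold = Value::Integer(97);
    assert!(avg_age > threshold);
    assert!(Value::Float(3.0) < threshold);
}
```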
@@ -2189,7 +2189,12 @@ impl Program {
                     }
                     ScalarFunc::Round => {
                         let reg_value = state.registers[*start_reg].clone();
-                        let precision_value = state.registers.get(*start_reg + 1).cloned();
+                        assert!(arg_count == 1 || arg_count == 2);
+                        let precision_value = if arg_count > 1 {
+                            Some(state.registers[*start_reg + 1].clone())
+                        } else {
+                            None
+                        };
                         let result = exec_round(&reg_value, precision_value);
                         state.registers[*dest] = result;
                     }
@@ -2554,7 +2559,10 @@ fn exec_concat(registers: &[OwnedValue]) -> OwnedValue {
             OwnedValue::Text(text) => result.push_str(text),
             OwnedValue::Integer(i) => result.push_str(&i.to_string()),
             OwnedValue::Float(f) => result.push_str(&f.to_string()),
-            _ => continue,
+            OwnedValue::Agg(aggctx) => result.push_str(&aggctx.final_value().to_string()),
+            OwnedValue::Null => continue,
+            OwnedValue::Blob(_) => todo!("TODO concat blob"),
+            OwnedValue::Record(_) => unreachable!(),
         }
     }
     OwnedValue::Text(Rc::new(result))
@@ -2909,20 +2917,27 @@ fn exec_unicode(reg: &OwnedValue) -> OwnedValue {
     }
 }
 
+fn _to_float(reg: &OwnedValue) -> f64 {
+    match reg {
+        OwnedValue::Text(x) => x.parse().unwrap_or(0.0),
+        OwnedValue::Integer(x) => *x as f64,
+        OwnedValue::Float(x) => *x,
+        _ => 0.0,
+    }
+}
+
 fn exec_round(reg: &OwnedValue, precision: Option<OwnedValue>) -> OwnedValue {
     let precision = match precision {
         Some(OwnedValue::Text(x)) => x.parse().unwrap_or(0.0),
         Some(OwnedValue::Integer(x)) => x as f64,
         Some(OwnedValue::Float(x)) => x,
-        None => 0.0,
-        _ => return OwnedValue::Null,
+        Some(OwnedValue::Null) => return OwnedValue::Null,
+        _ => 0.0,
     };
 
     let reg = match reg {
-        OwnedValue::Text(x) => x.parse().unwrap_or(0.0),
-        OwnedValue::Integer(x) => *x as f64,
-        OwnedValue::Float(x) => *x,
-        _ => return reg.to_owned(),
+        OwnedValue::Agg(ctx) => _to_float(ctx.final_value()),
+        _ => _to_float(reg),
     };
 
     let precision = if precision < 1.0 { 0.0 } else { precision };
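A rough model of the round() precision handling after this change, using plain f64/Option stand-ins rather than the VDBE's OwnedValue: a NULL precision yields NULL, a missing precision rounds to zero digits, precisions below 1 (including negatives) are treated as zero digits, and aggregate inputs go through their finalized value. The helper below is hypothetical and for illustration only:

```rust
// Outer Option: was a second argument passed at all?
// Inner Option: None models SQL NULL.
fn round_value(value: f64, precision: Option<Option<f64>>) -> Option<f64> {
    let digits = match precision {
        Some(None) => return None,                        // round(x, NULL) -> NULL
        Some(Some(p)) => if p < 1.0 { 0.0 } else { p },   // clamp below-1 to 0 digits
        None => 0.0,                                      // round(x) -> 0 digits
    };
    let factor = 10f64.powi(digits as i32);
    Some((value * factor).round() / factor)
}

fn main() {
    assert_eq!(round_value(123.456, Some(Some(1.0))), Some(123.5));
    assert_eq!(round_value(100.123, None), Some(100.0));
    assert_eq!(round_value(100.123, Some(None)), None);
}
```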
@@ -3763,6 +3778,14 @@ mod tests {
         let precision_val = OwnedValue::Integer(1);
         let expected_val = OwnedValue::Float(123.0);
         assert_eq!(exec_round(&input_val, Some(precision_val)), expected_val);
+
+        let input_val = OwnedValue::Float(100.123);
+        let expected_val = OwnedValue::Float(100.0);
+        assert_eq!(exec_round(&input_val, None), expected_val);
+
+        let input_val = OwnedValue::Float(100.123);
+        let expected_val = OwnedValue::Null;
+        assert_eq!(exec_round(&input_val, Some(OwnedValue::Null)), expected_val);
     }
 
     #[test]
@@ -130,4 +130,35 @@ do_execsql_test group_by_function_expression_ridiculous {
 
 do_execsql_test group_by_count_star {
   select u.first_name, count(*) from users u group by u.first_name limit 1;
-} {Aaron|41}
+} {Aaron|41}
+
+do_execsql_test having {
+  select u.first_name, round(avg(u.age)) from users u group by u.first_name having avg(u.age) > 97 order by avg(u.age) desc limit 5;
+} {Nina|100.0
+Kurt|99.0
+Selena|98.0}
+
+do_execsql_test having_with_binary_cond {
+  select u.first_name, sum(u.age) from users u group by u.first_name having sum(u.age) + 1000 = 9109;
+} {Robert|8109}
+
+do_execsql_test having_with_scalar_fn_over_aggregate {
+  select u.first_name, concat(count(1), ' people with this name') from users u group by u.first_name having count(1) > 50 order by count(1) asc limit 5;
+} {"Angela|51 people with this name
+Justin|51 people with this name
+Rachel|52 people with this name
+Susan|52 people with this name
+Jeffrey|54 people with this name"}
+
+do_execsql_test having_with_multiple_conditions {
+  select u.first_name, count(*), round(avg(u.age)) as avg_age
+  from users u
+  group by u.first_name
+  having count(*) > 40 and avg(u.age) > 40
+  order by count(*) desc, avg(u.age) desc
+  limit 5;
+} {Michael|228|49.0
+David|165|53.0
+Robert|159|51.0
+Jennifer|151|51.0
+John|145|50.0}