fix clippy errors for rust 1.88.0 (auto fix)

Authored by Nils Koch on 2025-06-23 22:14:33 +01:00, committed by Jussi Saurio
parent b68aaebe50
commit 828d4f5016
70 changed files with 404 additions and 558 deletions
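Nearly every hunk below is the same mechanical rewrite: clippy on Rust 1.88.0, run with `--fix`, inlines format arguments into the format string (the `uninlined_format_args` lint). A minimal before/after sketch; the `describe` function and its arguments are invented for illustration and are not code from the repository:

```rust
// Illustrative sketch of the uninlined_format_args rewrite applied across
// this commit; `describe` and its arguments are made up for the example.
fn describe(err: &str, count: usize) -> String {
    // Before: positional `{}` placeholders with trailing arguments.
    let before = format!("error: {}, count: {}", err, count);
    // After: the identifiers are captured directly inside the braces.
    let after = format!("error: {err}, count: {count}");
    assert_eq!(before, after); // the rendered output is identical
    after
}

fn main() {
    let msg = describe("disk I/O error", 3);
    println!("{msg}");
}
```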

View File

@@ -34,7 +34,7 @@ impl<'conn> LimboRows<'conn> {
fn get_error(&mut self) -> *const c_char {
if let Some(err) = &self.err {
let err = format!("{}", err);
let err = format!("{err}");
let c_str = std::ffi::CString::new(err).unwrap();
self.err = None;
c_str.into_raw() as *const c_char

View File

@@ -172,7 +172,7 @@ impl<'conn> LimboStatement<'conn> {
fn get_error(&mut self) -> *const c_char {
if let Some(err) = &self.err {
let err = format!("{}", err);
let err = format!("{err}");
let c_str = std::ffi::CString::new(err).unwrap();
self.err = None;
c_str.into_raw() as *const c_char

View File

@@ -118,7 +118,7 @@ fn row_to_obj_array<'local>(
turso_core::Value::Blob(b) => env.byte_array_from_slice(b.as_slice())?.into(),
};
if let Err(e) = env.set_object_array_element(&obj_array, i as i32, obj) {
eprintln!("Error on parsing row: {:?}", e);
eprintln!("Error on parsing row: {e:?}");
}
}

View File

@@ -36,5 +36,5 @@ async fn main() {
let value = row.get_value(0).unwrap();
println!("Row: {:?}", value);
println!("Row: {value:?}");
}

View File

@@ -68,8 +68,9 @@ impl RowIterator {
Ok(turso_core::StepResult::Done) | Ok(turso_core::StepResult::Interrupt) => {
JsValue::UNDEFINED
}
Ok(turso_core::StepResult::Busy) => JsValue::UNDEFINED,
Err(e) => panic!("Error: {:?}", e),
Err(e) => panic!("Error: {e:?}"),
}
}
}
@@ -104,11 +105,12 @@ impl Statement {
}
JsValue::from(row_array)
}
Ok(turso_core::StepResult::IO)
| Ok(turso_core::StepResult::Done)
| Ok(turso_core::StepResult::Interrupt)
| Ok(turso_core::StepResult::Busy) => JsValue::UNDEFINED,
Err(e) => panic!("Error: {:?}", e),
Err(e) => panic!("Error: {e:?}"),
}
}
@@ -130,7 +132,7 @@ impl Statement {
Ok(turso_core::StepResult::Interrupt) => break,
Ok(turso_core::StepResult::Done) => break,
Ok(turso_core::StepResult::Busy) => break,
Err(e) => panic!("Error: {:?}", e),
Err(e) => panic!("Error: {e:?}"),
}
}
array

View File

@@ -37,7 +37,7 @@ impl<'a> ImportFile<'a> {
let file = match File::open(args.file) {
Ok(file) => file,
Err(e) => {
let _ = self.writer.write_all(format!("{:?}\n", e).as_bytes());
let _ = self.writer.write_all(format!("{e:?}\n").as_bytes());
return;
}
};

View File

@@ -96,7 +96,7 @@ impl TryFrom<&str> for LimboColor {
"dark-cyan" => Color::Fixed(6),
"grey" => Color::Fixed(7),
"dark-grey" => Color::Fixed(8),
_ => return Err(format!("Could not parse color in string: {}", value)),
_ => return Err(format!("Could not parse color in string: {value}")),
};
trace!("Read predefined color: {}", value);

View File

@@ -29,7 +29,7 @@ impl Display for Io {
Io::Syscall => write!(f, "syscall"),
#[cfg(all(target_os = "linux", feature = "io_uring"))]
Io::IoUring => write!(f, "io_uring"),
Io::External(str) => write!(f, "{}", str),
Io::External(str) => write!(f, "{str}"),
}
}
}
@@ -144,7 +144,7 @@ pub fn get_writer(output: &str) -> Box<dyn Write> {
_ => match std::fs::File::create(output) {
Ok(file) => Box::new(file),
Err(e) => {
eprintln!("Error: {}", e);
eprintln!("Error: {e}");
Box::new(io::stdout())
}
},

View File

@@ -51,7 +51,7 @@ fn main() -> anyhow::Result<()> {
Ok(line) => match app.handle_input_line(line.trim()) {
Ok(_) => {}
Err(e) => {
eprintln!("{}", e);
eprintln!("{e}");
}
},
Err(ReadlineError::Interrupted) => {

View File

@@ -69,7 +69,7 @@ fn bench_prepare_query(criterion: &mut Criterion) {
];
for query in queries.iter() {
let mut group = criterion.benchmark_group(format!("Prepare `{}`", query));
let mut group = criterion.benchmark_group(format!("Prepare `{query}`"));
group.bench_with_input(
BenchmarkId::new("limbo_parse_query", query),

View File

@@ -443,7 +443,7 @@ fn bench(criterion: &mut Criterion) {
for (size_name, json_payload) in json_sizes.iter() {
let query = format!("SELECT jsonb('{}')", json_payload.replace("'", "\\'"));
let mut group = criterion.benchmark_group(format!("JSONB Size - {}", size_name));
let mut group = criterion.benchmark_group(format!("JSONB Size - {size_name}"));
group.bench_function("Limbo", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();
@@ -893,7 +893,7 @@ fn bench_json_patch(criterion: &mut Criterion) {
patch_json.replace("'", "''")
);
let mut group = criterion.benchmark_group(format!("JSON Patch - {}", case_name));
let mut group = criterion.benchmark_group(format!("JSON Patch - {case_name}"));
group.bench_function("Limbo", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();

View File

@@ -81,7 +81,7 @@ fn bench_tpc_h_queries(criterion: &mut Criterion) {
];
for (idx, query) in queries.iter() {
let mut group = criterion.benchmark_group(format!("Query `{}` ", idx));
let mut group = criterion.benchmark_group(format!("Query `{idx}` "));
group.sampling_mode(SamplingMode::Flat);
group.sample_size(10);

View File

@@ -102,10 +102,7 @@ impl Database {
other => match get_vfs_modules().iter().find(|v| v.0 == vfs) {
Some((_, vfs)) => vfs.clone(),
None => {
return Err(LimboError::InvalidArgument(format!(
"no such VFS: {}",
other
)));
return Err(LimboError::InvalidArgument(format!("no such VFS: {other}")));
}
},
};

View File

@@ -173,7 +173,7 @@ impl Display for VectorFunc {
Self::VectorExtract => "vector_extract".to_string(),
Self::VectorDistanceCos => "vector_distance_cos".to_string(),
};
write!(f, "{}", str)
write!(f, "{str}")
}
}
@@ -435,7 +435,7 @@ impl Display for ScalarFunc {
Self::TimeDiff => "timediff".to_string(),
Self::Likelihood => "likelihood".to_string(),
};
write!(f, "{}", str)
write!(f, "{str}")
}
}
@@ -550,7 +550,7 @@ impl Display for MathFunc {
Self::Tanh => "tanh".to_string(),
Self::Trunc => "trunc".to_string(),
};
write!(f, "{}", str)
write!(f, "{str}")
}
}
@@ -585,13 +585,13 @@ impl Display for Func {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Agg(agg_func) => write!(f, "{}", agg_func.to_string()),
Self::Scalar(scalar_func) => write!(f, "{}", scalar_func),
Self::Math(math_func) => write!(f, "{}", math_func),
Self::Vector(vector_func) => write!(f, "{}", vector_func),
Self::Scalar(scalar_func) => write!(f, "{scalar_func}"),
Self::Math(math_func) => write!(f, "{math_func}"),
Self::Vector(vector_func) => write!(f, "{vector_func}"),
#[cfg(feature = "json")]
Self::Json(json_func) => write!(f, "{}", json_func),
Self::External(generic_func) => write!(f, "{}", generic_func),
Self::AlterTable(alter_func) => write!(f, "{}", alter_func),
Self::Json(json_func) => write!(f, "{json_func}"),
Self::External(generic_func) => write!(f, "{generic_func}"),
Self::AlterTable(alter_func) => write!(f, "{alter_func}"),
}
}
}
@@ -636,7 +636,7 @@ impl Func {
}
"group_concat" => {
if arg_count != 1 && arg_count != 2 {
println!("{}", arg_count);
println!("{arg_count}");
crate::bail_parse_error!("wrong number of arguments to function {}()", name)
}
Ok(Self::Agg(AggFunc::GroupConcat))

View File

@@ -29,7 +29,7 @@ pub fn exec_strftime(values: &[Register]) -> Value {
let value = &values[0].get_owned_value();
let format_str = if matches!(value, Value::Text(_) | Value::Integer(_) | Value::Float(_)) {
format!("{}", value)
format!("{value}")
} else {
return Value::Null;
};
@@ -416,8 +416,8 @@ fn get_date_time_from_time_value_string(value: &str) -> Option<NaiveDateTime> {
// For time-only formats, assume date 2000-01-01
// Ref: https://sqlite.org/lang_datefunc.html#tmval
parse_datetime_with_optional_tz(
&format!("2000-01-01 {}", value),
&format!("%Y-%m-%d {}", format),
&format!("2000-01-01 {value}"),
&format!("%Y-%m-%d {format}"),
)
} else {
parse_datetime_with_optional_tz(value, format)
@@ -463,10 +463,7 @@ fn get_date_time_from_time_value_float(value: f64) -> Option<NaiveDateTime> {
if value.is_infinite() || value.is_nan() || !is_julian_day_value(value) {
return None;
}
match julian_day_converter::julian_day_to_datetime(value) {
Ok(dt) => Some(dt),
Err(_) => None,
}
julian_day_converter::julian_day_to_datetime(value).ok()
}
fn is_leap_second(dt: &NaiveDateTime) -> bool {
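The `julian_day_to_datetime` change above is a different clippy suggestion: a `match` that only forwards `Ok(v)` to `Some(v)` and maps `Err(_)` to `None` collapses into `Result::ok()` (the `manual_ok_err` lint, if memory serves). A self-contained sketch; `parse_port` is a hypothetical helper, not part of the diff:

```rust
// Hypothetical helper showing the Result-to-Option collapse; not from the diff.
fn parse_port(s: &str) -> Option<u16> {
    // Before:
    // match s.parse::<u16>() {
    //     Ok(p) => Some(p),
    //     Err(_) => None,
    // }
    // After: Result::ok() performs the same conversion in one call.
    s.parse::<u16>().ok()
}

fn main() {
    assert_eq!(parse_port("8080"), Some(8080));
    assert_eq!(parse_port("not a port"), None);
}
```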
@@ -521,7 +518,7 @@ enum Modifier {
fn parse_modifier_number(s: &str) -> Result<i64> {
s.trim()
.parse::<i64>()
.map_err(|_| InvalidModifier(format!("Invalid number: {}", s)))
.map_err(|_| InvalidModifier(format!("Invalid number: {s}")))
}
/// supports YYYY-MM-DD format for time shift modifiers
@@ -539,9 +536,9 @@ fn parse_modifier_time(s: &str) -> Result<NaiveTime> {
5 => NaiveTime::parse_from_str(s, "%H:%M"),
8 => NaiveTime::parse_from_str(s, "%H:%M:%S"),
12 => NaiveTime::parse_from_str(s, "%H:%M:%S.%3f"),
_ => return Err(InvalidModifier(format!("Invalid time format: {}", s))),
_ => return Err(InvalidModifier(format!("Invalid time format: {s}"))),
}
.map_err(|_| InvalidModifier(format!("Invalid time format: {}", s)))
.map_err(|_| InvalidModifier(format!("Invalid time format: {s}")))
}
fn parse_modifier(modifier: &str) -> Result<Modifier> {
@@ -811,8 +808,7 @@ mod tests {
assert_eq!(
result,
Value::build_text(expected),
"Failed for input: {:?}",
input
"Failed for input: {input:?}"
);
}
}
@@ -851,10 +847,7 @@ mod tests {
let result = exec_date(&[Register::Value(case.clone())]);
match result {
Value::Text(ref result_str) if result_str.value.is_empty() => (),
_ => panic!(
"Expected empty string for input: {:?}, but got: {:?}",
case, result
),
_ => panic!("Expected empty string for input: {case:?}, but got: {result:?}"),
}
}
}
@@ -947,7 +940,7 @@ mod tests {
if let Value::Text(result_str) = result {
assert_eq!(result_str.as_str(), expected);
} else {
panic!("Expected Value::Text, but got: {:?}", result);
panic!("Expected Value::Text, but got: {result:?}");
}
}
}
@@ -986,10 +979,7 @@ mod tests {
let result = exec_time(&[Register::Value(case.clone())]);
match result {
Value::Text(ref result_str) if result_str.value.is_empty() => (),
_ => panic!(
"Expected empty string for input: {:?}, but got: {:?}",
case, result
),
_ => panic!("Expected empty string for input: {case:?}, but got: {result:?}"),
}
}
}

View File

@@ -34,8 +34,8 @@ pub fn exec_printf(values: &[Register]) -> crate::Result<Value> {
}
let value = &values[args_index].get_owned_value();
match value {
Value::Integer(_) => result.push_str(&format!("{}", value)),
Value::Float(_) => result.push_str(&format!("{}", value)),
Value::Integer(_) => result.push_str(&format!("{value}")),
Value::Float(_) => result.push_str(&format!("{value}")),
_ => result.push('0'),
}
args_index += 1;
@@ -47,7 +47,7 @@ pub fn exec_printf(values: &[Register]) -> crate::Result<Value> {
match &values[args_index].get_owned_value() {
Value::Text(t) => result.push_str(t.as_str()),
Value::Null => result.push_str("(null)"),
v => result.push_str(&format!("{}", v)),
v => result.push_str(&format!("{v}")),
}
args_index += 1;
}
@@ -57,7 +57,7 @@ pub fn exec_printf(values: &[Register]) -> crate::Result<Value> {
}
let value = &values[args_index].get_owned_value();
match value {
Value::Float(f) => result.push_str(&format!("{:.6}", f)),
Value::Float(f) => result.push_str(&format!("{f:.6}")),
Value::Integer(i) => result.push_str(&format!("{:.6}", *i as f64)),
_ => result.push_str("0.0"),
}

View File

@@ -314,7 +314,7 @@ impl File for UnixFile<'_> {
ErrorKind::WouldBlock => {
"Failed locking file. File is locked by another process".to_string()
}
_ => format!("Failed locking file, {}", io_error),
_ => format!("Failed locking file, {io_error}"),
};
LimboError::LockingError(message)
})?;

View File

@@ -38,7 +38,7 @@ impl From<std::str::Utf8Error> for Error {
impl Display for Error {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Message { ref msg, .. } => write!(formatter, "{}", msg),
Self::Message { ref msg, .. } => write!(formatter, "{msg}"),
}
}
}

View File

@@ -1136,7 +1136,7 @@ impl Jsonb {
b'\r' => string.push_str("\\r"),
_ => {
// Format as \u00XX
let hex = format!("\\u{:04x}", ch);
let hex = format!("\\u{ch:04x}");
string.push_str(&hex);
}
}
@@ -1304,7 +1304,7 @@ impl Jsonb {
value = value * 16 + ch.to_digit(16).unwrap_or(0) as u64;
}
write!(string, "{}", value)
write!(string, "{value}")
.map_err(|_| LimboError::ParseError("Error writing string to json!".to_string()))?;
} else {
string.push_str(hex_str);
@@ -1336,7 +1336,7 @@ impl Jsonb {
val if val
.chars()
.next()
.map_or(false, |c| c.is_ascii_alphanumeric() || c == '+' || c == '-') =>
.is_some_and(|c| c.is_ascii_alphanumeric() || c == '+' || c == '-') =>
{
string.push_str(val);
string.push('0');
@@ -1403,7 +1403,7 @@ impl Jsonb {
|| c == b'-'
|| c == b'+'
|| c == b'.'
|| c.to_ascii_lowercase() == b'i' =>
|| c.eq_ignore_ascii_case(&b'i') =>
{
pos = self.deserialize_number(input, pos)?;
}
@@ -2113,7 +2113,7 @@ impl Jsonb {
std::cmp::Ordering::Greater => {
self.data.splice(
cursor + old_len..cursor + old_len,
std::iter::repeat(0).take(new_len - old_len),
std::iter::repeat_n(0, new_len - old_len),
);
}
std::cmp::Ordering::Less => {
@@ -3562,7 +3562,7 @@ world""#,
// Generate a large JSON with many elements
let mut large_array = String::from("[");
for i in 0..1000 {
large_array.push_str(&format!("{}", i));
large_array.push_str(&format!("{i}"));
if i < 999 {
large_array.push(',');
}
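Two smaller rewrites in the Jsonb hunks above go beyond format strings: `std::iter::repeat(0).take(n)` becomes `std::iter::repeat_n(0, n)` (stabilized in Rust 1.82, if I recall correctly), and a manual lowercase-then-compare on a byte becomes `u8::eq_ignore_ascii_case`. A hedged sketch with invented helper names:

```rust
// Illustrative only: `pad_with_zeroes` and `is_inf_start` are made-up helpers,
// not code from the Jsonb module.
fn pad_with_zeroes(buf: &mut Vec<u8>, extra: usize) {
    // Before: buf.extend(std::iter::repeat(0u8).take(extra));
    // After: repeat_n states the count up front and is an ExactSizeIterator.
    buf.extend(std::iter::repeat_n(0u8, extra));
}

fn is_inf_start(c: u8) -> bool {
    // Before: c.to_ascii_lowercase() == b'i'
    // After: compares both cases of the byte without the manual lowering.
    c.eq_ignore_ascii_case(&b'i')
}

fn main() {
    let mut buf = vec![1u8, 2, 3];
    pad_with_zeroes(&mut buf, 4);
    assert_eq!(buf, [1, 2, 3, 0, 0, 0, 0]);
    assert!(is_inf_start(b'I') && is_inf_start(b'i') && !is_inf_start(b'x'));
}
```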

View File

@@ -737,7 +737,7 @@ mod tests {
let binary_json: Vec<u8> = vec![0xA2, 0x62, 0x6B, 0x31, 0x62, 0x76]; // Incomplete binary JSON
let input = Value::Blob(binary_json);
let result = get_json(&input, None);
println!("{:?}", result);
println!("{result:?}");
match result {
Ok(_) => panic!("Expected error for malformed JSON"),
Err(e) => assert!(e.to_string().contains("malformed JSON")),
@@ -923,7 +923,7 @@ mod tests {
match result {
Ok(Value::Null) => (),
_ => panic!("Expected null result, got: {:?}", result),
_ => panic!("Expected null result, got: {result:?}"),
}
}
#[test]
@@ -937,7 +937,7 @@ mod tests {
match result {
Ok(Value::Null) => (),
_ => panic!("Expected null result, got: {:?}", result),
_ => panic!("Expected null result, got: {result:?}"),
}
}

View File

@@ -395,7 +395,7 @@ mod tests {
Err(crate::error::LimboError::ParseError(_)) => {
// happy path
}
_ => panic!("Expected error for: {:?}, got: {:?}", value, path),
_ => panic!("Expected error for: {value:?}, got: {path:?}"),
}
}
}

View File

@@ -176,7 +176,7 @@ impl Database {
enable_mvcc: bool,
enable_indexes: bool,
) -> Result<Arc<Database>> {
let wal_path = format!("{}-wal", path);
let wal_path = format!("{path}-wal");
let maybe_shared_wal = WalFileShared::open_shared_if_exists(&io, wal_path.as_str())?;
let db_size = db_file.size()?;
@@ -232,7 +232,7 @@ impl Database {
{
// this means that a vtab exists and we no longer have the module loaded. we print
// a warning to the user to load the module
eprintln!("Warning: {}", e);
eprintln!("Warning: {e}");
}
}
Ok(db)
@@ -848,7 +848,7 @@ impl Connection {
{
// this means that a vtab exists and we no longer have the module loaded. we print
// a warning to the user to load the module
eprintln!("Warning: {}", e);
eprintln!("Warning: {e}");
}
}
Ok(())
@@ -1049,7 +1049,7 @@ impl std::fmt::Debug for SymbolTable {
fn is_shared_library(path: &std::path::Path) -> bool {
path.extension()
.map_or(false, |ext| ext == "so" || ext == "dylib" || ext == "dll")
.is_some_and(|ext| ext == "so" || ext == "dylib" || ext == "dll")
}
pub fn resolve_ext_path(extpath: &str) -> Result<std::path::PathBuf> {
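The `is_shared_library` hunk above is one instance of the other big family in this commit: `Option::map_or(false, f)` becomes `is_some_and(f)`, `map_or(true, f)` becomes `is_none_or(f)`, and `Result::map_or(false, f)` becomes `is_ok_and(f)` (seen later in the btree tests). As far as I can tell this is clippy's `unnecessary_map_or` lint. A small sketch; the function names are invented, only the pattern mirrors the diff:

```rust
use std::path::Path;

// Illustrative helpers; the names are invented, only the pattern matches the diff.
fn has_dynlib_ext(path: &Path) -> bool {
    // Before: path.extension().map_or(false, |ext| ext == "so" || ext == "dylib" || ext == "dll")
    path.extension()
        .is_some_and(|ext| ext == "so" || ext == "dylib" || ext == "dll")
}

fn within_limit(limit: Option<u32>, n: u32) -> bool {
    // Before: limit.map_or(true, |l| n <= l) -- an absent limit means "no limit".
    limit.is_none_or(|l| n <= l)
}

fn main() {
    assert!(has_dynlib_ext(Path::new("libfoo.so")));
    assert!(!has_dynlib_ext(Path::new("notes.txt")));
    assert!(within_limit(None, 1_000));
    assert!(within_limit(Some(10), 3) && !within_limit(Some(10), 30));
}
```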
@@ -1057,8 +1057,7 @@ pub fn resolve_ext_path(extpath: &str) -> Result<std::path::PathBuf> {
if !path.exists() {
if is_shared_library(path) {
return Err(LimboError::ExtensionError(format!(
"Extension file not found: {}",
extpath
"Extension file not found: {extpath}"
)));
};
let maybe = path.with_extension(std::env::consts::DLL_EXTENSION);
@@ -1066,8 +1065,7 @@ pub fn resolve_ext_path(extpath: &str) -> Result<std::path::PathBuf> {
.exists()
.then_some(maybe)
.ok_or(LimboError::ExtensionError(format!(
"Extension file not found: {}",
extpath
"Extension file not found: {extpath}"
)))
} else {
Ok(path.to_path_buf())

View File

@@ -686,7 +686,7 @@ fn setup_sequential_db() -> (Rc<MvStore<TestClock>>, u64) {
let table_id = 1;
for i in 1..6 {
let id = RowID::new(table_id, i);
let data = format!("row{}", i).into_bytes();
let data = format!("row{i}").into_bytes();
let row = Row::new(id, data);
db.insert(tx_id, row).unwrap();
}

View File

@@ -144,8 +144,7 @@ impl PragmaVirtualTable {
fn no_such_pragma(pragma_name: &str) -> LimboError {
LimboError::ParseError(format!(
"No such table-valued function: pragma_{}",
pragma_name
"No such table-valued function: pragma_{pragma_name}"
))
}
@@ -246,7 +245,7 @@ impl PragmaVirtualTableCursor {
let mut sql = format!("PRAGMA {}", self.pragma_name);
if let Some(arg) = &self.arg {
sql.push_str(&format!("=\"{}\"", arg));
sql.push_str(&format!("=\"{arg}\""));
}
self.stmt = Some(self.conn.prepare(sql)?);

View File

@@ -274,20 +274,13 @@ mod tests {
let stop = series.stop;
let step = series.step;
let values = collect_series(series.clone()).unwrap_or_else(|e| {
panic!(
"Failed to generate series for start={}, stop={}, step={}: {:?}",
start, stop, step, e
)
panic!("Failed to generate series for start={start}, stop={stop}, step={step}: {e:?}")
});
if series_is_invalid_or_empty(&series) {
assert!(
values.is_empty(),
"Series should be empty for invalid range: start={}, stop={}, step={}, got {:?}",
start,
stop,
step,
values
"Series should be empty for invalid range: start={start}, stop={stop}, step={step}, got {values:?}"
);
} else {
let expected_len = series_expected_length(&series);
@@ -316,19 +309,13 @@ mod tests {
let step = series.step;
let values = collect_series(series.clone()).unwrap_or_else(|e| {
panic!(
"Failed to generate series for start={}, stop={}, step={}: {:?}",
start, stop, step, e
)
panic!("Failed to generate series for start={start}, stop={stop}, step={step}: {e:?}")
});
if series_is_invalid_or_empty(&series) {
assert!(
values.is_empty(),
"Series should be empty for invalid range: start={}, stop={}, step={}",
start,
stop,
step
"Series should be empty for invalid range: start={start}, stop={stop}, step={step}"
);
} else {
assert!(
@@ -356,19 +343,13 @@ mod tests {
let step = series.step;
let values = collect_series(series.clone()).unwrap_or_else(|e| {
panic!(
"Failed to generate series for start={}, stop={}, step={}: {:?}",
start, stop, step, e
)
panic!("Failed to generate series for start={start}, stop={stop}, step={step}: {e:?}")
});
if series_is_invalid_or_empty(&series) {
assert!(
values.is_empty(),
"Series should be empty for invalid range: start={}, stop={}, step={}",
start,
stop,
step
"Series should be empty for invalid range: start={start}, stop={stop}, step={step}"
);
} else if !values.is_empty() {
assert!(
@@ -396,37 +377,27 @@ mod tests {
let step = series.step;
let values = collect_series(series.clone()).unwrap_or_else(|e| {
panic!(
"Failed to generate series for start={}, stop={}, step={}: {:?}",
start, stop, step, e
)
panic!("Failed to generate series for start={start}, stop={stop}, step={step}: {e:?}")
});
if series_is_invalid_or_empty(&series) {
assert!(
values.is_empty(),
"Series should be empty for invalid range: start={}, stop={}, step={}",
start,
stop,
step
"Series should be empty for invalid range: start={start}, stop={stop}, step={step}"
);
} else if !values.is_empty() {
assert_eq!(
values.first(),
Some(&start),
"Series doesn't start with start value: {:?} (expected start: {})",
values,
start
"Series doesn't start with start value: {values:?} (expected start: {start})"
);
assert!(
values.last().map_or(true, |&last| if step > 0 {
values.last().is_none_or(|&last| if step > 0 {
last <= stop
} else {
last >= stop
}),
"Series exceeds stop value: {:?} (stop: {})",
values,
stop
"Series exceeds stop value: {values:?} (stop: {stop})"
);
}
}
@@ -501,8 +472,7 @@ mod tests {
.expect("Failed to generate series");
assert!(
values.is_empty(),
"Invalid positive range should return empty series, got {:?}",
values
"Invalid positive range should return empty series, got {values:?}"
);
let values = collect_series(Series {
@@ -564,20 +534,15 @@ mod tests {
match cursor.next() {
ResultCode::OK => rowids.push(cur_rowid),
ResultCode::EOF => break,
err => panic!(
"Unexpected error {:?} for start={}, stop={}, step={}",
err, start, stop, step
),
err => {
panic!("Unexpected error {err:?} for start={start}, stop={stop}, step={step}")
}
}
}
assert!(
rowids.windows(2).all(|w| w[1] == w[0] + 1),
"Rowids not monotonically increasing: {:?} (start={}, stop={}, step={})",
rowids,
start,
stop,
step
"Rowids not monotonically increasing: {rowids:?} (start={start}, stop={stop}, step={step})"
);
}
@@ -589,19 +554,13 @@ mod tests {
let step = series.step;
let values = collect_series(series.clone()).unwrap_or_else(|e| {
panic!(
"Failed to generate series for start={}, stop={}, step={}: {:?}",
start, stop, step, e
)
panic!("Failed to generate series for start={start}, stop={stop}, step={step}: {e:?}")
});
if series_is_invalid_or_empty(&series) {
assert!(
values.is_empty(),
"Series should be empty for invalid range: start={}, stop={}, step={}",
start,
stop,
step
"Series should be empty for invalid range: start={start}, stop={stop}, step={step}"
);
} else if start == stop {
assert_eq!(

View File

@@ -1636,7 +1636,7 @@ impl BTreeCursor {
if matches!(
self.seek_state,
CursorSeekState::Start { .. }
CursorSeekState::Start
| CursorSeekState::MovingBetweenPages { .. }
| CursorSeekState::InteriorPageBinarySearch { .. }
) {
@@ -1754,7 +1754,7 @@ impl BTreeCursor {
) -> Result<CursorResult<bool>> {
if matches!(
self.seek_state,
CursorSeekState::Start { .. }
CursorSeekState::Start
| CursorSeekState::MovingBetweenPages { .. }
| CursorSeekState::InteriorPageBinarySearch { .. }
) {
@@ -2123,7 +2123,7 @@ impl BTreeCursor {
} else {
write_info.state = WriteState::BalanceStart;
// If we balance, we must save the cursor position and seek to it later.
// FIXME: we shouldn't have both DeleteState::SeekAfterBalancing and
// FIXME: we shouldn't have both DeleteState::SeekAfterBalancing and
// save_context()/restore_context(), they are practically the same thing.
self.save_context(CursorContext::TableRowId(bkey.to_rowid()));
}
@@ -2154,14 +2154,14 @@ impl BTreeCursor {
} else {
write_info.state = WriteState::BalanceStart;
// If we balance, we must save the cursor position and seek to it later.
// FIXME: we shouldn't have both DeleteState::SeekAfterBalancing and
// FIXME: we shouldn't have both DeleteState::SeekAfterBalancing and
// save_context()/restore_context(), they are practically the same thing.
self.save_context(CursorContext::IndexKeyRowId((*record).clone()));
}
continue;
}
}
other => panic!("unexpected cell type, expected TableLeaf or IndexLeaf, found: {:?}", other),
other => panic!("unexpected cell type, expected TableLeaf or IndexLeaf, found: {other:?}"),
}
}
// insert cell
@@ -2295,7 +2295,7 @@ impl BTreeCursor {
return_if_io!(self.balance_non_root());
}
WriteState::Finish => return Ok(CursorResult::Ok(())),
_ => panic!("unexpected state on balance {:?}", state),
_ => panic!("unexpected state on balance {state:?}"),
}
}
}
@@ -2356,9 +2356,7 @@ impl BTreeCursor {
);
turso_assert!(
page_to_balance_idx <= parent_contents.cell_count(),
"page_to_balance_idx={} is out of bounds for parent cell count {}",
page_to_balance_idx,
number_of_cells_in_parent
"page_to_balance_idx={page_to_balance_idx} is out of bounds for parent cell count {number_of_cells_in_parent}"
);
// As there will be at maximum 3 pages used to balance:
// sibling_pointer is the index representing one of those 3 pages, and we initialize it to the last possible page.
@@ -3075,8 +3073,7 @@ impl BTreeCursor {
// FIXME: remove this lock
turso_assert!(
left_pointer <= header_accessor::get_database_size(&self.pager)?,
"invalid page number divider left pointer {} > database number of pages",
left_pointer,
"invalid page number divider left pointer {left_pointer} > database number of pages",
);
// FIXME: defragment shouldn't be needed
// defragment_page(parent_contents, self.usable_space() as u16);
@@ -4071,7 +4068,7 @@ impl BTreeCursor {
.reusable_immutable_record
.borrow()
.as_ref()
.map_or(true, |record| record.is_invalidated());
.is_none_or(|record| record.is_invalidated());
if !invalidated {
*self.parse_record_state.borrow_mut() = ParseRecordState::Init;
let record_ref =
@@ -5319,7 +5316,7 @@ fn validate_cells_after_insertion(cell_array: &CellArray, leaf_data: bool) {
assert!(cell.len() >= 4);
if leaf_data {
assert!(cell[0] != 0, "payload is {:?}", cell);
assert!(cell[0] != 0, "payload is {cell:?}");
}
}
}
@@ -6038,10 +6035,7 @@ fn debug_validate_cells_core(page: &PageContent, usable_space: u16) {
// Rowid 1 (stored as SerialTypeKind::ConstInt1)
assert!(
size >= 2,
"cell size should be at least 2 bytes idx={}, cell={:?}, offset={}",
i,
buf,
offset
"cell size should be at least 2 bytes idx={i}, cell={buf:?}, offset={offset}"
);
if page.is_leaf() {
assert!(page.as_ptr()[offset] != 0);
@@ -6574,7 +6568,7 @@ mod tests {
valid &= child_valid;
child_depth
}
_ => panic!("unsupported btree cell: {:?}", cell),
_ => panic!("unsupported btree cell: {cell:?}"),
};
if current_depth >= 100 {
tracing::error!("depth is too big");
@@ -6599,7 +6593,7 @@ mod tests {
}
previous_key = Some(rowid);
}
_ => panic!("unsupported btree cell: {:?}", cell),
_ => panic!("unsupported btree cell: {cell:?}"),
}
}
if let Some(right) = contents.rightmost_pointer() {
@@ -6677,7 +6671,7 @@ mod tests {
cell.first_overflow_page.is_some()
));
}
_ => panic!("unsupported btree cell: {:?}", cell),
_ => panic!("unsupported btree cell: {cell:?}"),
}
}
if let Some(rightmost) = contents.rightmost_pointer() {
@@ -6794,8 +6788,7 @@ mod tests {
cursor.seek(seek_key, SeekOp::GE { eq_only: true }).unwrap(),
CursorResult::Ok(true)
),
"key {} is not found",
key
"key {key} is not found"
);
}
}
@@ -6823,7 +6816,7 @@ mod tests {
) {
const VALIDATE_INTERVAL: usize = 1000;
let do_validate_btree = std::env::var("VALIDATE_BTREE")
.map_or(false, |v| v.parse().expect("validate should be bool"));
.is_ok_and(|v| v.parse().expect("validate should be bool"));
let (mut rng, seed) = rng_from_time_or_env();
let mut seen = HashSet::new();
tracing::info!("super seed: {}", seed);
@@ -6900,7 +6893,7 @@ mod tests {
.unwrap();
if *key != cursor_rowid {
valid = false;
println!("key {} is not found, got {}", key, cursor_rowid);
println!("key {key} is not found, got {cursor_rowid}");
break;
}
}
@@ -6910,8 +6903,8 @@ mod tests {
&& (!valid || matches!(validate_btree(pager.clone(), root_page), (_, false)))
{
let btree_after = format_btree(pager.clone(), root_page, 0);
println!("btree before:\n{}", btree_before);
println!("btree after:\n{}", btree_after);
println!("btree before:\n{btree_before}");
println!("btree after:\n{btree_after}");
panic!("invalid btree");
}
pager.end_read_tx().unwrap();
@@ -6933,8 +6926,7 @@ mod tests {
.unwrap();
assert_eq!(
*key, cursor_rowid,
"key {} is not found, got {}",
key, cursor_rowid
"key {key} is not found, got {cursor_rowid}"
);
}
pager.end_read_tx().unwrap();
@@ -7328,8 +7320,7 @@ mod tests {
let leaf_page_id = contents.read_u32(8 + (i as usize * 4));
assert!(
(2..=4).contains(&leaf_page_id),
"Leaf page ID {} should be in range 2-4",
leaf_page_id
"Leaf page ID {leaf_page_id} should be in range 2-4"
);
}
}
@@ -8073,7 +8064,7 @@ mod tests {
let mut cursor = BTreeCursor::new_table(None, pager.clone(), root_page);
let key = Value::Integer(*key);
let exists = run_until_done(|| cursor.exists(&key), pager.deref()).unwrap();
assert!(exists, "key not found {}", key);
assert!(exists, "key not found {key}");
}
}
@@ -8168,7 +8159,7 @@ mod tests {
let mut cursor = BTreeCursor::new_table(None, pager.clone(), root_page);
let key = Value::Integer(i);
let exists = run_until_done(|| cursor.exists(&key), pager.deref()).unwrap();
assert!(exists, "Key {} should exist but doesn't", i);
assert!(exists, "Key {i} should exist but doesn't");
}
// Verify the deleted records don't exist.
@@ -8176,7 +8167,7 @@ mod tests {
let mut cursor = BTreeCursor::new_table(None, pager.clone(), root_page);
let key = Value::Integer(i);
let exists = run_until_done(|| cursor.exists(&key), pager.deref()).unwrap();
assert!(!exists, "Deleted key {} still exists", i);
assert!(!exists, "Deleted key {i} still exists");
}
}

View File

@@ -418,15 +418,13 @@ impl DumbLruPageCache {
if forward_count > map_len + 5 {
panic!(
"Infinite loop suspected in forward integrity check. Size {}, count {}",
map_len, forward_count
"Infinite loop suspected in forward integrity check. Size {map_len}, count {forward_count}"
);
}
}
assert_eq!(
forward_count, map_len,
"Forward count mismatch (counted {}, map has {})",
forward_count, map_len
"Forward count mismatch (counted {forward_count}, map has {map_len})"
);
assert_eq!(
tail_ptr, last_ptr,
@@ -457,15 +455,13 @@ impl DumbLruPageCache {
}
if backward_count > map_len + 5 {
panic!(
"Infinite loop suspected in backward integrity check. Size {}, count {}",
map_len, backward_count
"Infinite loop suspected in backward integrity check. Size {map_len}, count {backward_count}"
);
}
}
assert_eq!(
backward_count, map_len,
"Backward count mismatch (counted {}, map has {})",
backward_count, map_len
"Backward count mismatch (counted {backward_count}, map has {map_len})"
);
assert_eq!(
head_ptr, last_ptr,
@@ -1018,7 +1014,7 @@ mod tests {
Err(CacheError::Full | CacheError::ActiveRefs) => {} // Ignore
Err(err) => {
// Any other error should fail the test
panic!("Cache insertion failed: {:?}", err);
panic!("Cache insertion failed: {err:?}");
}
Ok(_) => {
lru.push(key, page);
@@ -1051,7 +1047,7 @@ mod tests {
}
cache.verify_list_integrity();
for (key, page) in &lru {
println!("getting page {:?}", key);
println!("getting page {key:?}");
cache.peek(key, false).unwrap();
assert_eq!(page.get().id, key.pgno);
}

View File

@@ -344,8 +344,7 @@ impl Pager {
Some(content) => content,
None => {
return Err(LimboError::InternalError(format!(
"Ptrmap page {} content not loaded",
ptrmap_pg_no
"Ptrmap page {ptrmap_pg_no} content not loaded"
)))
}
};
@@ -367,8 +366,7 @@ impl Pager {
// Check if the calculated offset for the entry is within the bounds of the actual page data length.
if offset_in_ptrmap_page + PTRMAP_ENTRY_SIZE > actual_data_length {
return Err(LimboError::InternalError(format!(
"Ptrmap offset {} + entry size {} out of bounds for page {} (actual data len {})",
offset_in_ptrmap_page, PTRMAP_ENTRY_SIZE, ptrmap_pg_no, actual_data_length
"Ptrmap offset {offset_in_ptrmap_page} + entry size {PTRMAP_ENTRY_SIZE} out of bounds for page {ptrmap_pg_no} (actual data len {actual_data_length})"
)));
}
@@ -377,8 +375,7 @@ impl Pager {
match PtrmapEntry::deserialize(entry_slice) {
Some(entry) => Ok(CursorResult::Ok(Some(entry))),
None => Err(LimboError::Corrupt(format!(
"Failed to deserialize ptrmap entry for page {} from ptrmap page {}",
target_page_num, ptrmap_pg_no
"Failed to deserialize ptrmap entry for page {target_page_num} from ptrmap page {ptrmap_pg_no}"
))),
}
}
@@ -406,8 +403,7 @@ impl Pager {
|| is_ptrmap_page(db_page_no_to_update, page_size)
{
return Err(LimboError::InternalError(format!(
"Cannot set ptrmap entry for page {}: it's a header/ptrmap page or invalid.",
db_page_no_to_update
"Cannot set ptrmap entry for page {db_page_no_to_update}: it's a header/ptrmap page or invalid."
)));
}
@@ -436,8 +432,7 @@ impl Pager {
Some(content) => content,
None => {
return Err(LimboError::InternalError(format!(
"Ptrmap page {} content not loaded",
ptrmap_pg_no
"Ptrmap page {ptrmap_pg_no} content not loaded"
)))
}
};
@@ -525,7 +520,7 @@ impl Pager {
// For now map allocated_page_id since we are not swapping it with root_page_num
match self.ptrmap_put(allocated_page_id, PtrmapType::RootPage, 0)? {
CursorResult::Ok(_) => Ok(CursorResult::Ok(allocated_page_id as u32)),
CursorResult::Ok(_) => Ok(CursorResult::Ok(allocated_page_id)),
CursorResult::IO => Ok(CursorResult::IO),
}
}
@@ -707,8 +702,7 @@ impl Pager {
}
Err(e) => {
return Err(LimboError::InternalError(format!(
"Failed to insert page into cache: {:?}",
e
"Failed to insert page into cache: {e:?}"
)))
}
}
@@ -729,8 +723,7 @@ impl Pager {
}
Err(e) => {
return Err(LimboError::InternalError(format!(
"Failed to insert page into cache: {:?}",
e
"Failed to insert page into cache: {e:?}"
)))
}
}
@@ -960,7 +953,7 @@ impl Pager {
checkpoint_result = res;
break;
}
Err(err) => panic!("error while clearing cache {}", err),
Err(err) => panic!("error while clearing cache {err}"),
}
}
// TODO: only clear cache of things that are really invalidated
@@ -984,8 +977,7 @@ impl Pager {
if page_id < 2 || page_id > header_accessor::get_database_size(self)? as usize {
return Err(LimboError::Corrupt(format!(
"Invalid page number {} for free operation",
page_id
"Invalid page number {page_id} for free operation"
)));
}
@@ -1191,8 +1183,7 @@ impl Pager {
.insert_ignore_existing(page_key, page.clone())
.map_err(|e| {
LimboError::InternalError(format!(
"Failed to insert loaded page {} into cache: {:?}",
id, e
"Failed to insert loaded page {id} into cache: {e:?}"
))
})?;
page.set_loaded();
@@ -1427,14 +1418,12 @@ mod ptrmap {
|| db_page_no_to_query > last_data_page_mapped
{
return Err(LimboError::InternalError(format!(
"Page {} is not mapped by the data page range [{}, {}] of ptrmap page {}",
db_page_no_to_query, first_data_page_mapped, last_data_page_mapped, ptrmap_page_no
"Page {db_page_no_to_query} is not mapped by the data page range [{first_data_page_mapped}, {last_data_page_mapped}] of ptrmap page {ptrmap_page_no}"
)));
}
if is_ptrmap_page(db_page_no_to_query, page_size) {
return Err(LimboError::InternalError(format!(
"Page {} is a pointer map page and should not have an entry calculated this way.",
db_page_no_to_query
"Page {db_page_no_to_query} is a pointer map page and should not have an entry calculated this way."
)));
}
@@ -1551,12 +1540,12 @@ mod ptrmap_tests {
panic!("test_pager_setup: btree_create returned CursorResult::IO unexpectedly");
}
Err(e) => {
panic!("test_pager_setup: btree_create failed: {:?}", e);
panic!("test_pager_setup: btree_create failed: {e:?}");
}
}
}
return pager;
pager
}
#[test]
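Two one-liners in the pager hunks are worth calling out: `return pager;` at the end of `test_pager_setup` becomes a plain tail expression (presumably `clippy::needless_return`), and the `1 * PTRMAP_ENTRY_SIZE` factors in the offset assertions below drop the redundant multiplication (`clippy::identity_op`). A trivial sketch with invented names:

```rust
// Invented example of the needless_return / identity_op cleanups.
const ENTRY_SIZE: usize = 5;

fn entry_offset(index: usize) -> usize {
    // Before: return index * ENTRY_SIZE;   (needless_return)
    index * ENTRY_SIZE
}

fn main() {
    // Before: assert_eq!(entry_offset(1), 1 * ENTRY_SIZE);   (identity_op)
    assert_eq!(entry_offset(1), ENTRY_SIZE);
    assert_eq!(entry_offset(3), 3 * ENTRY_SIZE);
}
```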
@@ -1636,7 +1625,7 @@ mod ptrmap_tests {
assert_eq!(get_ptrmap_offset_in_page(3, 2, page_size).unwrap(), 0);
assert_eq!(
get_ptrmap_offset_in_page(4, 2, page_size).unwrap(),
1 * PTRMAP_ENTRY_SIZE
PTRMAP_ENTRY_SIZE
);
assert_eq!(
get_ptrmap_offset_in_page(5, 2, page_size).unwrap(),
@@ -1650,7 +1639,7 @@ mod ptrmap_tests {
assert_eq!(get_ptrmap_offset_in_page(106, 105, page_size).unwrap(), 0);
assert_eq!(
get_ptrmap_offset_in_page(107, 105, page_size).unwrap(),
1 * PTRMAP_ENTRY_SIZE
PTRMAP_ENTRY_SIZE
);
assert_eq!(
get_ptrmap_offset_in_page(108, 105, page_size).unwrap(),

View File

@@ -356,7 +356,7 @@ impl TryFrom<u8> for PageType {
5 => Ok(Self::TableInterior),
10 => Ok(Self::IndexLeaf),
13 => Ok(Self::TableLeaf),
_ => Err(LimboError::Corrupt(format!("Invalid page type: {}", value))),
_ => Err(LimboError::Corrupt(format!("Invalid page type: {value}"))),
}
}
}
@@ -545,9 +545,7 @@ impl PageContent {
let ncells = self.cell_count();
assert!(
idx < ncells,
"cell_get: idx out of bounds: idx={}, ncells={}",
idx,
ncells
"cell_get: idx out of bounds: idx={idx}, ncells={ncells}"
);
let cell_pointer_array_start = self.header_size();
let cell_pointer = cell_pointer_array_start + (idx * CELL_PTR_SIZE_BYTES);
@@ -708,7 +706,7 @@ impl PageContent {
let mut pc = self.first_freeblock() as usize;
let mut block_num = 0;
println!("---- Free List Blocks ----");
println!("first freeblock pointer: {}", pc);
println!("first freeblock pointer: {pc}");
println!("cell content area: {}", self.cell_content_area());
println!("fragmented bytes: {}", self.num_frag_free_bytes());
@@ -716,10 +714,7 @@ impl PageContent {
let next = self.read_u16_no_offset(pc);
let size = self.read_u16_no_offset(pc + 2);
println!(
"block {}: position={}, size={}, next={}",
block_num, pc, size, next
);
println!("block {block_num}: position={pc}, size={size}, next={next}");
pc = next as usize;
block_num += 1;
}
@@ -1379,7 +1374,7 @@ pub fn read_entire_wal_dumb(file: &Arc<dyn File>) -> Result<Arc<UnsafeCell<WalFi
if !(MIN_PAGE_SIZE..=MAX_PAGE_SIZE).contains(&page_size_u32)
|| page_size_u32.count_ones() != 1
{
panic!("Invalid page size in WAL header: {}", page_size_u32);
panic!("Invalid page size in WAL header: {page_size_u32}");
}
let page_size = page_size_u32 as usize;

View File

@@ -24,7 +24,7 @@ pub enum CollationSeq {
impl CollationSeq {
pub fn new(collation: &str) -> crate::Result<Self> {
CollationSeq::from_str(collation).map_err(|_| {
crate::LimboError::ParseError(format!("no such collation sequence: {}", collation))
crate::LimboError::ParseError(format!("no such collation sequence: {collation}"))
})
}

View File

@@ -38,14 +38,14 @@ impl Display for Plan {
} => {
for (plan, operator) in left {
plan.fmt(f)?;
writeln!(f, "{}", operator)?;
writeln!(f, "{operator}")?;
}
right_most.fmt(f)?;
if let Some(limit) = limit {
writeln!(f, "LIMIT: {}", limit)?;
writeln!(f, "LIMIT: {limit}")?;
}
if let Some(offset) = offset {
writeln!(f, "OFFSET: {}", offset)?;
writeln!(f, "OFFSET: {offset}")?;
}
if let Some(order_by) = order_by {
writeln!(f, "ORDER BY:")?;
@@ -95,7 +95,7 @@ impl Display for SelectPlan {
format!("{} AS {}", reference.table.get_name(), reference.identifier)
};
writeln!(f, "{}SCAN {}", indent, table_name)?;
writeln!(f, "{indent}SCAN {table_name}")?;
}
Operation::Search(search) => match search {
Search::RowidEq { .. } | Search::Seek { index: None, .. } => {
@@ -137,7 +137,7 @@ impl Display for DeletePlan {
format!("{} AS {}", reference.table.get_name(), reference.identifier)
};
writeln!(f, "{}DELETE FROM {}", indent, table_name)?;
writeln!(f, "{indent}DELETE FROM {table_name}")?;
}
Operation::Search { .. } => {
panic!("DELETE plans should not contain search operations");
@@ -173,9 +173,9 @@ impl fmt::Display for UpdatePlan {
};
if i == 0 {
writeln!(f, "{}UPDATE {}", indent, table_name)?;
writeln!(f, "{indent}UPDATE {table_name}")?;
} else {
writeln!(f, "{}SCAN {}", indent, table_name)?;
writeln!(f, "{indent}SCAN {table_name}")?;
}
}
Operation::Search(search) => match search {
@@ -214,7 +214,7 @@ impl fmt::Display for UpdatePlan {
}
}
if let Some(limit) = self.limit {
writeln!(f, "LIMIT: {}", limit)?;
writeln!(f, "LIMIT: {limit}")?;
}
if let Some(ret) = &self.returning {
writeln!(f, "RETURNING:")?;
@@ -301,10 +301,10 @@ impl ToSqlString for Plan {
));
}
if let Some(limit) = &limit {
ret.push(format!("LIMIT {}", limit));
ret.push(format!("LIMIT {limit}"));
}
if let Some(offset) = &offset {
ret.push(format!("OFFSET {}", offset));
ret.push(format!("OFFSET {offset}"));
}
ret.join(" ")
}
@@ -364,7 +364,7 @@ impl ToSqlString for SelectPlan {
.map(|e| e.to_sql_string(context))
.collect::<Vec<_>>()
.join(", ");
format!("({})", joined_value)
format!("({joined_value})")
})
.collect::<Vec<_>>()
.join(", ")
@@ -384,7 +384,7 @@ impl ToSqlString for SelectPlan {
cols.expr.to_sql_string(context),
cols.alias
.as_ref()
.map_or("".to_string(), |alias| format!(" AS {}", alias))
.map_or("".to_string(), |alias| format!(" AS {alias}"))
)
})
.collect::<Vec<_>>()
@@ -450,10 +450,10 @@ impl ToSqlString for SelectPlan {
));
}
if let Some(limit) = &self.limit {
ret.push(format!("LIMIT {}", limit));
ret.push(format!("LIMIT {limit}"));
}
if let Some(offset) = &self.offset {
ret.push(format!("OFFSET {}", offset));
ret.push(format!("OFFSET {offset}"));
}
ret.join(" ")
}
@@ -493,10 +493,10 @@ impl ToSqlString for DeletePlan {
));
}
if let Some(limit) = &self.limit {
ret.push(format!("LIMIT {}", limit));
ret.push(format!("LIMIT {limit}"));
}
if let Some(offset) = &self.offset {
ret.push(format!("OFFSET {}", offset));
ret.push(format!("OFFSET {offset}"));
}
ret.join(" ")
}
@@ -560,10 +560,10 @@ impl ToSqlString for UpdatePlan {
));
}
if let Some(limit) = &self.limit {
ret.push(format!("LIMIT {}", limit));
ret.push(format!("LIMIT {limit}"));
}
if let Some(offset) = &self.offset {
ret.push(format!("OFFSET {}", offset));
ret.push(format!("OFFSET {offset}"));
}
ret.join(" ")
}

View File

@@ -226,7 +226,7 @@ pub fn translate_create_index(
p5: 0,
});
// Parse the schema table to get the index root page and add new index to Schema
let parse_schema_where_clause = format!("name = '{}' AND type = 'index'", idx_name);
let parse_schema_where_clause = format!("name = '{idx_name}' AND type = 'index'");
program.emit_insn(Insn::ParseSchema {
db: sqlite_schema_cursor_id,
where_clause: Some(parse_schema_where_clause),

View File

@@ -682,7 +682,7 @@ fn resolve_columns_for_insert<'a>(
let table_index = table_columns.iter().position(|c| {
c.name
.as_ref()
.map_or(false, |name| name.eq_ignore_ascii_case(&column_name))
.is_some_and(|name| name.eq_ignore_ascii_case(&column_name))
});
let Some(table_index) = table_index else {
@@ -743,7 +743,7 @@ fn resolve_indicies_for_insert(
.column
.name
.as_ref()
.map_or(false, |name| name.eq_ignore_ascii_case(&target_name))
.is_some_and(|name| name.eq_ignore_ascii_case(&target_name))
}) {
idx_map.columns.push((i, idx_col.clone()));
idx_map.value_indicies.push(col_mapping.value_index);

View File

@@ -98,7 +98,7 @@ pub fn find_best_access_method_for_join_order<'a>(
match &candidate.index {
Some(index) => index.columns[i].pos_in_table == order_target.0[i].column_no,
None => {
rowid_column_idx.map_or(false, |idx| idx == order_target.0[i].column_no)
rowid_column_idx.is_some_and(|idx| idx == order_target.0[i].column_no)
}
}
};

View File

@@ -293,7 +293,7 @@ pub fn constraints_from_where_clause(
// For each constraint we found, add a reference to it for each index that may be able to use it.
for (i, constraint) in cs.constraints.iter().enumerate() {
if rowid_alias_column.map_or(false, |idx| constraint.table_col_pos == idx) {
if rowid_alias_column == Some(constraint.table_col_pos) {
let rowid_candidate = cs
.candidates
.iter_mut()
@@ -325,7 +325,7 @@ pub fn constraints_from_where_clause(
if candidate
.index
.as_ref()
.map_or(false, |i| Arc::ptr_eq(index, i))
.is_some_and(|i| Arc::ptr_eq(index, i))
{
Some(candidate)
} else {
@@ -409,6 +409,6 @@ fn opposite_cmp_op(op: ast::Operator) -> ast::Operator {
ast::Operator::GreaterEquals => ast::Operator::LessEquals,
ast::Operator::Less => ast::Operator::Greater,
ast::Operator::LessEquals => ast::Operator::GreaterEquals,
_ => panic!("unexpected operator: {:?}", op),
_ => panic!("unexpected operator: {op:?}"),
}
}

View File

@@ -217,7 +217,7 @@ pub fn compute_best_join_order<'a>(
let left_join_illegal_map = {
let left_join_count = joined_tables
.iter()
.filter(|t| t.join_info.as_ref().map_or(false, |j| j.outer))
.filter(|t| t.join_info.as_ref().is_some_and(|j| j.outer))
.count();
if left_join_count == 0 {
None
@@ -227,7 +227,7 @@ pub fn compute_best_join_order<'a>(
HashMap::with_capacity(left_join_count);
for (i, _) in joined_tables.iter().enumerate() {
for (j, joined_table) in joined_tables.iter().enumerate().skip(i + 1) {
if joined_table.join_info.as_ref().map_or(false, |j| j.outer) {
if joined_table.join_info.as_ref().is_some_and(|j| j.outer) {
// bitwise OR the masks
if let Some(illegal_lhs) = left_join_illegal_map.get_mut(&i) {
illegal_lhs.add_table(j);
@@ -296,7 +296,7 @@ pub fn compute_best_join_order<'a>(
is_outer: joined_tables[table_no]
.join_info
.as_ref()
.map_or(false, |j| j.outer),
.is_some_and(|j| j.outer),
});
}
join_order.push(JoinOrderMember {
@@ -305,7 +305,7 @@ pub fn compute_best_join_order<'a>(
is_outer: joined_tables[rhs_idx]
.join_info
.as_ref()
.map_or(false, |j| j.outer),
.is_some_and(|j| j.outer),
});
assert!(join_order.len() == subset_size);
@@ -406,7 +406,7 @@ pub fn compute_naive_left_deep_plan<'a>(
.map(|(i, t)| JoinOrderMember {
table_id: t.internal_id,
original_idx: i,
is_outer: t.join_info.as_ref().map_or(false, |j| j.outer),
is_outer: t.join_info.as_ref().is_some_and(|j| j.outer),
})
.collect::<Vec<_>>();
@@ -828,7 +828,7 @@ mod tests {
.iter()
.for_each(|table_name| {
// add primary key index called sqlite_autoindex_<tablename>_1
let index_name = format!("sqlite_autoindex_{}_1", table_name);
let index_name = format!("sqlite_autoindex_{table_name}_1");
let index = Arc::new(Index {
name: index_name,
table_name: table_name.to_string(),
@@ -1063,10 +1063,7 @@ mod tests {
// Create fact table with foreign keys to all dimension tables
let mut fact_columns = vec![_create_column_rowid_alias("id")];
for i in 0..NUM_DIM_TABLES {
fact_columns.push(_create_column_of_type(
&format!("dim{}_id", i),
Type::Integer,
));
fact_columns.push(_create_column_of_type(&format!("dim{i}_id"), Type::Integer));
}
let fact_table = _create_btree_table("fact", fact_columns);
@@ -1074,7 +1071,7 @@ mod tests {
let dim_tables: Vec<_> = (0..NUM_DIM_TABLES)
.map(|i| {
_create_btree_table(
&format!("dim{}", i),
&format!("dim{i}"),
vec![
_create_column_rowid_alias("id"),
_create_column_of_type("value", Type::Integer),

View File

@@ -231,7 +231,7 @@ fn optimize_table_access(
is_outer: joined_tables[table_number]
.join_info
.as_ref()
.map_or(false, |join_info| join_info.outer),
.is_some_and(|join_info| join_info.outer),
})
.collect();
@@ -334,8 +334,7 @@ fn optimize_table_access(
}
assert!(
constraint_refs.len() == 1,
"expected exactly one constraint for rowid seek, got {:?}",
constraint_refs
"expected exactly one constraint for rowid seek, got {constraint_refs:?}"
);
let constraint = &constraints_per_table[table_idx].constraints
[constraint_refs[0].constraint_vec_pos];
@@ -467,14 +466,10 @@ pub trait Optimizable {
// return a [ConstantPredicate].
fn check_always_true_or_false(&self) -> Result<Option<AlwaysTrueOrFalse>>;
fn is_always_true(&self) -> Result<bool> {
Ok(self
.check_always_true_or_false()?
.map_or(false, |c| c == AlwaysTrueOrFalse::AlwaysTrue))
Ok(self.check_always_true_or_false()? == Some(AlwaysTrueOrFalse::AlwaysTrue))
}
fn is_always_false(&self) -> Result<bool> {
Ok(self
.check_always_true_or_false()?
.map_or(false, |c| c == AlwaysTrueOrFalse::AlwaysFalse))
Ok(self.check_always_true_or_false()? == Some(AlwaysTrueOrFalse::AlwaysFalse))
}
fn is_constant(&self, resolver: &Resolver<'_>) -> bool;
fn is_nonnull(&self, tables: &TableReferences) -> bool;
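In the optimizer hunks above, `map_or(false, |c| c == X)` on an `Option` is rewritten as a direct comparison with `Some(X)` (the `is_always_true`/`is_always_false` bodies here, and the `rowid_alias_column` check earlier); as far as I know the same `unnecessary_map_or` lint suggests this form when the closure is just an equality test. A sketch with an invented enum that mirrors the shape:

```rust
// Invented example of the `map_or(false, |c| c == X)` -> `== Some(X)` rewrite.
#[derive(PartialEq)]
enum Verdict {
    AlwaysTrue,
    AlwaysFalse,
}

fn is_always_true(verdict: Option<Verdict>) -> bool {
    // Before: verdict.map_or(false, |v| v == Verdict::AlwaysTrue)
    // After: Option<T> compares against Some(value) directly when T: PartialEq.
    verdict == Some(Verdict::AlwaysTrue)
}

fn main() {
    assert!(is_always_true(Some(Verdict::AlwaysTrue)));
    assert!(!is_always_true(Some(Verdict::AlwaysFalse)));
    assert!(!is_always_true(None));
}
```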
@@ -499,13 +494,13 @@ impl Optimizable for ast::Expr {
else_expr,
..
} => {
base.as_ref().map_or(true, |base| base.is_nonnull(tables))
base.as_ref().is_none_or(|base| base.is_nonnull(tables))
&& when_then_pairs
.iter()
.all(|(_, then)| then.is_nonnull(tables))
&& else_expr
.as_ref()
.map_or(true, |else_expr| else_expr.is_nonnull(tables))
.is_none_or(|else_expr| else_expr.is_nonnull(tables))
}
Expr::Cast { expr, .. } => expr.is_nonnull(tables),
Expr::Collate(expr, _) => expr.is_nonnull(tables),
@@ -536,7 +531,7 @@ impl Optimizable for ast::Expr {
lhs.is_nonnull(tables)
&& rhs
.as_ref()
.map_or(true, |rhs| rhs.iter().all(|rhs| rhs.is_nonnull(tables)))
.is_none_or(|rhs| rhs.iter().all(|rhs| rhs.is_nonnull(tables)))
}
Expr::InSelect { .. } => false,
Expr::InTable { .. } => false,
@@ -582,14 +577,13 @@ impl Optimizable for ast::Expr {
when_then_pairs,
else_expr,
} => {
base.as_ref()
.map_or(true, |base| base.is_constant(resolver))
base.as_ref().is_none_or(|base| base.is_constant(resolver))
&& when_then_pairs.iter().all(|(when, then)| {
when.is_constant(resolver) && then.is_constant(resolver)
})
&& else_expr
.as_ref()
.map_or(true, |else_expr| else_expr.is_constant(resolver))
.is_none_or(|else_expr| else_expr.is_constant(resolver))
}
Expr::Cast { expr, .. } => expr.is_constant(resolver),
Expr::Collate(expr, _) => expr.is_constant(resolver),
@@ -604,9 +598,9 @@ impl Optimizable for ast::Expr {
return false;
};
func.is_deterministic()
&& args.as_ref().map_or(true, |args| {
args.iter().all(|arg| arg.is_constant(resolver))
})
&& args
.as_ref()
.is_none_or(|args| args.iter().all(|arg| arg.is_constant(resolver)))
}
Expr::FunctionCallStar { .. } => false,
Expr::Id(_) => panic!("Id should have been rewritten as Column"),
@@ -616,7 +610,7 @@ impl Optimizable for ast::Expr {
lhs.is_constant(resolver)
&& rhs
.as_ref()
.map_or(true, |rhs| rhs.iter().all(|rhs| rhs.is_constant(resolver)))
.is_none_or(|rhs| rhs.iter().all(|rhs| rhs.is_constant(resolver)))
}
Expr::InSelect { .. } => {
false // might be constant, too annoying to check subqueries etc. implement later
@@ -630,7 +624,7 @@ impl Optimizable for ast::Expr {
&& rhs.is_constant(resolver)
&& escape
.as_ref()
.map_or(true, |escape| escape.is_constant(resolver))
.is_none_or(|escape| escape.is_constant(resolver))
}
Expr::Literal(_) => true,
Expr::Name(_) => false,
@@ -639,9 +633,7 @@ impl Optimizable for ast::Expr {
Expr::Qualified(_, _) => {
panic!("Qualified should have been rewritten as Column")
}
Expr::Raise(_, expr) => expr
.as_ref()
.map_or(true, |expr| expr.is_constant(resolver)),
Expr::Raise(_, expr) => expr.as_ref().is_none_or(|expr| expr.is_constant(resolver)),
Expr::Subquery(_) => false,
Expr::Unary(_, expr) => expr.is_constant(resolver),
Expr::Variable(_) => false,
@@ -816,7 +808,7 @@ fn ephemeral_index_build(
has_rowid: table_reference
.table
.btree()
.map_or(false, |btree| btree.has_rowid),
.is_some_and(|btree| btree.has_rowid),
};
ephemeral_index
@@ -1322,7 +1314,7 @@ pub fn rewrite_expr(top_level_expr: &mut ast::Expr, param_idx: &mut usize) -> Re
if var.is_empty() {
// rewrite anonymous variables only, ensure that the `param_idx` starts at 1 and
// all the expressions are rewritten in the order they come in the statement
*expr = ast::Expr::Variable(format!("{}{param_idx}", PARAM_PREFIX));
*expr = ast::Expr::Variable(format!("{PARAM_PREFIX}{param_idx}"));
*param_idx += 1;
}
}

View File

@@ -562,7 +562,7 @@ pub fn select_star(tables: &[JoinedTable], out_columns: &mut Vec<ResultSetColumn
!using_cols.iter().any(|using_col| {
col.name
.as_ref()
.map_or(false, |name| name.eq_ignore_ascii_case(&using_col.0))
.is_some_and(|name| name.eq_ignore_ascii_case(&using_col.0))
})
} else {
true
@@ -811,10 +811,7 @@ impl TableReferences {
{
outer_query_ref.mark_column_used(column_index);
} else {
panic!(
"table with internal id {} not found in table references",
internal_id
);
panic!("table with internal id {internal_id} not found in table references");
}
}
@@ -964,7 +961,7 @@ impl JoinedTable {
match &self.table {
Table::BTree(btree) => {
let use_covering_index = self.utilizes_covering_index();
let index_is_ephemeral = index.map_or(false, |index| index.ephemeral);
let index_is_ephemeral = index.is_some_and(|index| index.ephemeral);
let table_not_required =
OperationMode::SELECT == mode && use_covering_index && !index_is_ephemeral;
let table_cursor_id = if table_not_required {

View File

@@ -135,7 +135,7 @@ pub fn bind_column_references(
let col_idx = joined_table.table.columns().iter().position(|c| {
c.name
.as_ref()
.map_or(false, |name| name.eq_ignore_ascii_case(&normalized_id))
.is_some_and(|name| name.eq_ignore_ascii_case(&normalized_id))
});
if col_idx.is_some() {
if match_result.is_some() {
@@ -163,7 +163,7 @@ pub fn bind_column_references(
let col_idx = outer_ref.table.columns().iter().position(|c| {
c.name
.as_ref()
.map_or(false, |name| name.eq_ignore_ascii_case(&normalized_id))
.is_some_and(|name| name.eq_ignore_ascii_case(&normalized_id))
});
if col_idx.is_some() {
if match_result.is_some() {
@@ -191,7 +191,7 @@ pub fn bind_column_references(
for result_column in result_columns.iter() {
if result_column
.name(referenced_tables)
.map_or(false, |name| name.eq_ignore_ascii_case(&normalized_id))
.is_some_and(|name| name.eq_ignore_ascii_case(&normalized_id))
{
*expr = result_column.expr.clone();
return Ok(());
@@ -218,7 +218,7 @@ pub fn bind_column_references(
let col_idx = tbl.columns().iter().position(|c| {
c.name
.as_ref()
.map_or(false, |name| name.eq_ignore_ascii_case(&normalized_id))
.is_some_and(|name| name.eq_ignore_ascii_case(&normalized_id))
});
let Some(col_idx) = col_idx else {
crate::bail_parse_error!("Column {} not found", normalized_id);
@@ -340,7 +340,7 @@ fn parse_from_clause_table(
ast::As::As(id) => id.0.clone(),
ast::As::Elided(id) => id.0.clone(),
})
.unwrap_or(format!("subquery_{}", cur_table_index));
.unwrap_or(format!("subquery_{cur_table_index}"));
table_references.add_joined_table(JoinedTable::new_subquery(
identifier,
subplan,
@@ -808,7 +808,7 @@ fn parse_join(
.find(|(_, col)| {
col.name
.as_ref()
.map_or(false, |name| *name == name_normalized)
.is_some_and(|name| *name == name_normalized)
})
.map(|(idx, col)| (left_table_idx, left_table.internal_id, idx, col));
if left_col.is_some() {
@@ -824,7 +824,7 @@ fn parse_join(
let right_col = right_table.columns().iter().enumerate().find(|(_, col)| {
col.name
.as_ref()
.map_or(false, |name| *name == name_normalized)
.is_some_and(|name| *name == name_normalized)
});
if right_col.is_none() {
crate::bail_parse_error!(

View File

@@ -263,7 +263,7 @@ fn query_pragma(
Some(ast::Expr::Name(name)) => {
let mode_name = normalize_ident(&name.0);
CheckpointMode::from_str(&mode_name).map_err(|e| {
LimboError::ParseError(format!("Unknown Checkpoint Mode: {}", e))
LimboError::ParseError(format!("Unknown Checkpoint Mode: {e}"))
})?
}
_ => CheckpointMode::Passive,

View File

@@ -74,7 +74,7 @@ pub fn translate_select(
.sum::<usize>(),
}
}
other => panic!("plan is not a SelectPlan: {:?}", other),
other => panic!("plan is not a SelectPlan: {other:?}"),
};
program.extend(&opts);
@@ -148,7 +148,7 @@ pub fn prepare_select_plan(
let (limit, offset) = select.limit.map_or(Ok((None, None)), |l| parse_limit(&l))?;
// FIXME: handle OFFSET for compound selects
if offset.map_or(false, |o| o > 0) {
if offset.is_some_and(|o| o > 0) {
crate::bail_parse_error!("OFFSET is not supported for compound SELECTs yet");
}
// FIXME: handle ORDER BY for compound selects
@@ -257,7 +257,7 @@ fn prepare_one_select_plan(
.map(|(i, t)| JoinOrderMember {
table_id: t.internal_id,
original_idx: i,
is_outer: t.join_info.as_ref().map_or(false, |j| j.outer),
is_outer: t.join_info.as_ref().is_some_and(|j| j.outer),
})
.collect(),
table_references,

View File

@@ -39,7 +39,7 @@ impl Display for ValueType {
Self::Text => "TEXT",
Self::Error => "ERROR",
};
write!(f, "{}", value)
write!(f, "{value}")
}
}
@@ -132,7 +132,7 @@ fn float_to_string<S>(float: &f64, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&format!("{}", float))
serializer.serialize_str(&format!("{float}"))
}
#[cfg(feature = "serde")]
@@ -317,7 +317,7 @@ impl Display for Value {
match self {
Self::Null => write!(f, ""),
Self::Integer(i) => {
write!(f, "{}", i)
write!(f, "{i}")
}
Self::Float(fl) => {
let fl = *fl;
@@ -337,7 +337,7 @@ impl Display for Value {
// handle scientific notation without trailing zeros
if (fl.abs() < 1e-4 || fl.abs() >= 1e15) && fl != 0.0 {
let sci_notation = format!("{:.14e}", fl);
let sci_notation = format!("{fl:.14e}");
let parts: Vec<&str> = sci_notation.split('e').collect();
if parts.len() == 2 {
@@ -358,7 +358,7 @@ impl Display for Value {
let trimmed_mantissa = if fraction.is_empty() {
whole.to_string()
} else {
format!("{}.{}", whole, fraction)
format!("{whole}.{fraction}")
};
let (prefix, exponent) =
if let Some(stripped_exponent) = exponent.strip_prefix('-') {
@@ -366,12 +366,12 @@ impl Display for Value {
} else {
("+", exponent)
};
return write!(f, "{}e{}{}", trimmed_mantissa, prefix, exponent);
return write!(f, "{trimmed_mantissa}e{prefix}{exponent}");
}
}
// fallback
return write!(f, "{}", sci_notation);
return write!(f, "{sci_notation}");
}
// handle floating point max size is 15.
@@ -381,15 +381,15 @@ impl Display for Value {
if (fl - rounded).abs() < 1e-14 {
// if we very close to integer trim decimal part to 1 digit
if rounded == rounded as i64 as f64 {
return write!(f, "{:.1}", fl);
return write!(f, "{fl:.1}");
}
}
let fl_str = format!("{}", fl);
let fl_str = format!("{fl}");
let splitted = fl_str.split('.').collect::<Vec<&str>>();
// fallback
if splitted.len() != 2 {
return write!(f, "{:.14e}", fl);
return write!(f, "{fl:.14e}");
}
let first_part = if fl < 0.0 {
@@ -411,7 +411,7 @@ impl Display for Value {
};
// float that have integer part > 15 converted to sci notation
if reminder < 0 {
return write!(f, "{:.14e}", fl);
return write!(f, "{fl:.14e}");
}
// trim decimal part to reminder or self len so total digits is 15;
let mut fl = format!("{:.*}", second.len().min(reminder as usize), fl);
@@ -419,7 +419,7 @@ impl Display for Value {
while fl.ends_with('0') {
fl.pop();
}
write!(f, "{}", fl)
write!(f, "{fl}")
}
Self::Text(s) => {
write!(f, "{}", s.as_str())
@@ -969,7 +969,7 @@ impl ImmutableRecord {
SerialTypeKind::I32 => writer.extend_from_slice(&(*i as i32).to_be_bytes()),
SerialTypeKind::I48 => writer.extend_from_slice(&i.to_be_bytes()[2..]), // remove 2 most significant bytes
SerialTypeKind::I64 => writer.extend_from_slice(&i.to_be_bytes()),
other => panic!("Serial type is not an integer: {:?}", other),
other => panic!("Serial type is not an integer: {other:?}"),
}
}
Value::Float(f) => {
@@ -1134,8 +1134,8 @@ impl Display for RefValue {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Null => write!(f, "NULL"),
Self::Integer(i) => write!(f, "{}", i),
Self::Float(fl) => write!(f, "{:?}", fl),
Self::Integer(i) => write!(f, "{i}"),
Self::Float(fl) => write!(f, "{fl:?}"),
Self::Text(s) => write!(f, "{}", s.as_str()),
Self::Blob(b) => write!(f, "{}", String::from_utf8_lossy(b.to_slice())),
}
@@ -1198,7 +1198,7 @@ pub struct IndexKeySortOrder(u64);
impl IndexKeySortOrder {
pub fn get_sort_order_for_col(&self, column_idx: usize) -> SortOrder {
assert!(column_idx < 64, "column index out of range: {}", column_idx);
assert!(column_idx < 64, "column index out of range: {column_idx}");
match self.0 & (1 << column_idx) {
0 => SortOrder::Asc,
_ => SortOrder::Desc,
@@ -1442,10 +1442,7 @@ impl TryFrom<u64> for SerialType {
fn try_from(uint: u64) -> Result<Self> {
if uint == 10 || uint == 11 {
return Err(LimboError::Corrupt(format!(
"Invalid serial type: {}",
uint
)));
return Err(LimboError::Corrupt(format!("Invalid serial type: {uint}")));
}
Ok(SerialType(uint))
}
@@ -1505,7 +1502,7 @@ impl Record {
// if( nVarint<sqlite3VarintLen(nHdr) ) nHdr++;
}
assert!(header_size <= 126);
header_bytes_buf.extend(std::iter::repeat(0).take(9));
header_bytes_buf.extend(std::iter::repeat_n(0, 9));
let n = write_varint(header_bytes_buf.as_mut_slice(), header_size as u64);
header_bytes_buf.truncate(n);
buf.splice(initial_i..initial_i, header_bytes_buf.iter().cloned());
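The `repeat_n` change in this hunk is a different auto-fix from the format-string ones: `std::iter::repeat(0).take(9)` becomes `std::iter::repeat_n(0, 9)`, using `iter::repeat_n`, stabilized in Rust 1.82 (recent clippy versions report the old form as `manual_repeat_n`). A short self-contained sketch of the equivalence, on an illustrative byte buffer rather than the record header code:

fn main() {
    // Reserve nine placeholder bytes, the way the record header code does.
    let mut old_style: Vec<u8> = Vec::new();
    old_style.extend(std::iter::repeat(0u8).take(9));

    let mut new_style: Vec<u8> = Vec::new();
    // repeat_n(value, n) is the drop-in replacement for repeat(value).take(n).
    new_style.extend(std::iter::repeat_n(0u8, 9));

    assert_eq!(old_style, new_style);
    assert_eq!(new_style.len(), 9);
}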

View File

@@ -225,7 +225,7 @@ impl ProgramBuilder {
pub fn constant_span_is_open(&self) -> bool {
self.constant_spans
.last()
.map_or(false, |(_, end)| *end == usize::MAX)
.is_some_and(|(_, end)| *end == usize::MAX)
}
/// Get the index of the next constant span.
@@ -272,7 +272,7 @@ impl ProgramBuilder {
!self
.cursor_ref
.iter()
.any(|(k, _)| k.as_ref().map_or(false, |k| k.equals(&key))),
.any(|(k, _)| k.as_ref().is_some_and(|k| k.equals(&key))),
"duplicate cursor key"
);
self._alloc_cursor_id(Some(key), cursor_type)
@@ -456,7 +456,7 @@ impl ProgramBuilder {
/// reordering the emitted instructions.
#[inline]
pub fn preassign_label_to_next_insn(&mut self, label: BranchOffset) {
assert!(label.is_label(), "BranchOffset {:?} is not a label", label);
assert!(label.is_label(), "BranchOffset {label:?} is not a label");
self._resolve_label(label, self.offset().sub(1u32), JumpTarget::AfterThisInsn);
}
@@ -492,10 +492,7 @@ impl ProgramBuilder {
let Some(Some((to_offset, target))) =
self.label_to_resolved_offset.get(*label as usize)
else {
panic!(
"Reference to undefined or unresolved label in {}: {}",
insn_name, label
);
panic!("Reference to undefined or unresolved label in {insn_name}: {label}");
};
*pc = BranchOffset::Offset(
to_offset
@@ -707,12 +704,12 @@ impl ProgramBuilder {
pub fn resolve_cursor_id_safe(&self, key: &CursorKey) -> Option<CursorID> {
self.cursor_ref
.iter()
.position(|(k, _)| k.as_ref().map_or(false, |k| k.equals(key)))
.position(|(k, _)| k.as_ref().is_some_and(|k| k.equals(key)))
}
pub fn resolve_cursor_id(&self, key: &CursorKey) -> CursorID {
self.resolve_cursor_id_safe(key)
.unwrap_or_else(|| panic!("Cursor not found: {:?}", key))
.unwrap_or_else(|| panic!("Cursor not found: {key:?}"))
}
pub fn set_collation(&mut self, c: Option<(CollationSeq, bool)>) {

View File

@@ -40,7 +40,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]+r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}]+r[{rhs}]"),
),
Insn::Subtract { lhs, rhs, dest } => (
"Subtract",
@@ -49,7 +49,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]-r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}]-r[{rhs}]"),
),
Insn::Multiply { lhs, rhs, dest } => (
"Multiply",
@@ -58,7 +58,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]*r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}]*r[{rhs}]"),
),
Insn::Divide { lhs, rhs, dest } => (
"Divide",
@@ -67,7 +67,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]/r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}]/r[{rhs}]"),
),
Insn::BitAnd { lhs, rhs, dest } => (
"BitAnd",
@@ -76,7 +76,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]&r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}]&r[{rhs}]"),
),
Insn::BitOr { lhs, rhs, dest } => (
"BitOr",
@@ -85,7 +85,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]|r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}]|r[{rhs}]"),
),
Insn::BitNot { reg, dest } => (
"BitNot",
@@ -94,7 +94,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("r[{}]=~r[{}]", dest, reg),
format!("r[{dest}]=~r[{reg}]"),
),
Insn::Checkpoint {
database,
@@ -107,7 +107,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("r[{}]=~r[{}]", dest, database),
format!("r[{dest}]=~r[{database}]"),
),
Insn::Remainder { lhs, rhs, dest } => (
"Remainder",
@@ -116,7 +116,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]%r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}]%r[{rhs}]"),
),
Insn::Null { dest, dest_end } => (
"Null",
@@ -125,8 +125,8 @@ pub fn insn_to_str(
dest_end.map_or(0, |end| end as i32),
Value::build_text(""),
0,
dest_end.map_or(format!("r[{}]=NULL", dest), |end| {
format!("r[{}..{}]=NULL", dest, end)
dest_end.map_or(format!("r[{dest}]=NULL"), |end| {
format!("r[{dest}..{end}]=NULL")
}),
),
Insn::NullRow { cursor_id } => (
@@ -136,7 +136,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("Set cursor {} to a (pseudo) NULL row", cursor_id),
format!("Set cursor {cursor_id} to a (pseudo) NULL row"),
),
Insn::NotNull { reg, target_pc } => (
"NotNull",
@@ -417,7 +417,7 @@ pub fn insn_to_str(
args_reg.unwrap_or(0) as i32,
Value::build_text(""),
0,
format!("table={}, module={}", table_name, module_name),
format!("table={table_name}, module={module_name}"),
),
Insn::VFilter {
cursor_id,
@@ -492,7 +492,7 @@ pub fn insn_to_str(
*num_fields as i32,
Value::build_text(""),
0,
format!("{} columns in r[{}]", num_fields, content_reg),
format!("{num_fields} columns in r[{content_reg}]"),
),
Insn::Rewind {
cursor_id,
@@ -578,7 +578,7 @@ pub fn insn_to_str(
dest_reg,
index_name,
} => {
let for_index = index_name.as_ref().map(|name| format!("; for {}", name));
let for_index = index_name.as_ref().map(|name| format!("; for {name}"));
(
"MakeRecord",
*start_reg as i32,
@@ -603,7 +603,7 @@ pub fn insn_to_str(
Value::build_text(""),
0,
if *count == 1 {
format!("output=r[{}]", start_reg)
format!("output=r[{start_reg}]")
} else {
format!("output=r[{}..{}]", start_reg, start_reg + count - 1)
},
@@ -652,7 +652,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("write={}", write),
format!("write={write}"),
),
Insn::Goto { target_pc } => (
"Goto",
@@ -694,7 +694,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("r[{}]={}", dest, value),
format!("r[{dest}]={value}"),
),
Insn::Real { value, dest } => (
"Real",
@@ -703,7 +703,7 @@ pub fn insn_to_str(
0,
Value::Float(*value),
0,
format!("r[{}]={}", dest, value),
format!("r[{dest}]={value}"),
),
Insn::RealAffinity { register } => (
"RealAffinity",
@@ -721,7 +721,7 @@ pub fn insn_to_str(
0,
Value::build_text(value),
0,
format!("r[{}]='{}'", dest, value),
format!("r[{dest}]='{value}'"),
),
Insn::Blob { value, dest } => (
"Blob",
@@ -765,7 +765,7 @@ pub fn insn_to_str(
if k.index.is_some() { "index" } else { "table" },
get_table_or_index_name(*cursor_id),
))
.unwrap_or(format!("cursor {}", cursor_id))
.unwrap_or(format!("cursor {cursor_id}"))
),
),
Insn::SeekRowid {
@@ -791,7 +791,7 @@ pub fn insn_to_str(
if k.index.is_some() { "index" } else { "table" },
get_table_or_index_name(*cursor_id),
))
.unwrap_or(format!("cursor {}", cursor_id)),
.unwrap_or(format!("cursor {cursor_id}")),
target_pc.as_debug_int()
),
),
@@ -873,7 +873,7 @@ pub fn insn_to_str(
unpacked_start.unwrap_or(0) as i32,
Value::build_text(""),
flags.0 as u16,
format!("key=r[{}]", record_reg),
format!("key=r[{record_reg}]"),
),
Insn::IdxGT {
cursor_id,
@@ -974,7 +974,7 @@ pub fn insn_to_str(
0,
Value::build_text(format!("k({},{})", order.len(), to_print.join(","))),
0,
format!("cursor={}", cursor_id),
format!("cursor={cursor_id}"),
)
}
Insn::SorterData {
@@ -988,7 +988,7 @@ pub fn insn_to_str(
*pseudo_cursor as i32,
Value::build_text(""),
0,
format!("r[{}]=data", dest_reg),
format!("r[{dest_reg}]=data"),
),
Insn::SorterInsert {
cursor_id,
@@ -1000,7 +1000,7 @@ pub fn insn_to_str(
0,
Value::Integer(0),
0,
format!("key=r[{}]", record_reg),
format!("key=r[{record_reg}]"),
),
Insn::SorterSort {
cursor_id,
@@ -1046,9 +1046,9 @@ pub fn insn_to_str(
},
0,
if func.arg_count == 0 {
format!("r[{}]=func()", dest)
format!("r[{dest}]=func()")
} else if *start_reg == *start_reg + func.arg_count - 1 {
format!("r[{}]=func(r[{}])", dest, start_reg)
format!("r[{dest}]=func(r[{start_reg}])")
} else {
format!(
"r[{}]=func(r[{}..{}])",
@@ -1105,7 +1105,7 @@ pub fn insn_to_str(
*key_reg as i32,
Value::build_text(table_name),
flag.0 as u16,
format!("intkey=r[{}] data=r[{}]", key_reg, record_reg),
format!("intkey=r[{key_reg}] data=r[{record_reg}]"),
),
Insn::Delete { cursor_id } => (
"Delete",
@@ -1141,7 +1141,7 @@ pub fn insn_to_str(
*prev_largest_reg as i32,
Value::build_text(""),
0,
format!("r[{}]=rowid", rowid_reg),
format!("r[{rowid_reg}]=rowid"),
),
Insn::MustBeInt { reg } => (
"MustBeInt",
@@ -1170,7 +1170,7 @@ pub fn insn_to_str(
let key = if *num_regs > 0 {
format!("key=r[{}..{}]", record_reg, record_reg + num_regs - 1)
} else {
format!("key=r[{}]", record_reg)
format!("key=r[{record_reg}]")
};
(
"NoConflict",
@@ -1207,8 +1207,7 @@ pub fn insn_to_str(
Value::build_text(""),
0,
format!(
"if r[{}]>0 then r[{}]=r[{}]+max(0,r[{}]) else r[{}]=(-1)",
limit_reg, combined_reg, limit_reg, offset_reg, combined_reg
"if r[{limit_reg}]>0 then r[{combined_reg}]=r[{limit_reg}]+max(0,r[{offset_reg}]) else r[{combined_reg}]=(-1)"
),
),
Insn::OpenWrite {
@@ -1226,7 +1225,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("root={}; {}", root_page, name),
format!("root={root_page}; {name}"),
),
Insn::Copy {
src_reg,
@@ -1239,7 +1238,7 @@ pub fn insn_to_str(
*amount as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}]", dst_reg, src_reg),
format!("r[{dst_reg}]=r[{src_reg}]"),
),
Insn::CreateBtree { db, root, flags } => (
"CreateBtree",
@@ -1262,8 +1261,7 @@ pub fn insn_to_str(
Value::build_text(""),
0,
format!(
"root iDb={} former_root={} is_temp={}",
root, former_root_reg, is_temp
"root iDb={root} former_root={former_root_reg} is_temp={is_temp}"
),
),
Insn::DropTable {
@@ -1278,7 +1276,7 @@ pub fn insn_to_str(
0,
Value::build_text(table_name),
0,
format!("DROP TABLE {}", table_name),
format!("DROP TABLE {table_name}"),
),
Insn::DropIndex { db: _, index } => (
"DropIndex",
@@ -1347,7 +1345,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}] >> r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}] >> r[{rhs}]"),
),
Insn::ShiftLeft { lhs, rhs, dest } => (
"ShiftLeft",
@@ -1356,7 +1354,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}] << r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}] << r[{rhs}]"),
),
Insn::Variable { index, dest } => (
"Variable",
@@ -1375,8 +1373,7 @@ pub fn insn_to_str(
Value::build_text(""),
0,
format!(
"((r[{}]=NULL)|(r[{}]=NULL)) ? r[{}]=NULL : r[{}]=0",
rg1, rg2, dest, dest
"((r[{rg1}]=NULL)|(r[{rg2}]=NULL)) ? r[{dest}]=NULL : r[{dest}]=0"
),
),
Insn::Not { reg, dest } => (
@@ -1386,7 +1383,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("r[{}]=!r[{}]", dest, reg),
format!("r[{dest}]=!r[{reg}]"),
),
Insn::Concat { lhs, rhs, dest } => (
"Concat",
@@ -1395,7 +1392,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=r[{}] + r[{}]", dest, lhs, rhs),
format!("r[{dest}]=r[{lhs}] + r[{rhs}]"),
),
Insn::And { lhs, rhs, dest } => (
"And",
@@ -1404,7 +1401,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=(r[{}] && r[{}])", dest, lhs, rhs),
format!("r[{dest}]=(r[{lhs}] && r[{rhs}])"),
),
Insn::Or { lhs, rhs, dest } => (
"Or",
@@ -1413,7 +1410,7 @@ pub fn insn_to_str(
*dest as i32,
Value::build_text(""),
0,
format!("r[{}]=(r[{}] || r[{}])", dest, lhs, rhs),
format!("r[{dest}]=(r[{lhs}] || r[{rhs}])"),
),
Insn::Noop => ("Noop", 0, 0, 0, Value::build_text(""), 0, String::new()),
Insn::PageCount { db, dest } => (
@@ -1458,7 +1455,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("auto_commit={}, rollback={}", auto_commit, rollback),
format!("auto_commit={auto_commit}, rollback={rollback}"),
),
Insn::OpenEphemeral {
cursor_id,
@@ -1483,7 +1480,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("cursor={}", cursor_id),
format!("cursor={cursor_id}"),
),
Insn::Once {
target_pc_when_reentered,
@@ -1503,8 +1500,8 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
dest_end.map_or(format!("r[{}]=NULL", dest), |end| {
format!("r[{}..{}]=NULL", dest, end)
dest_end.map_or(format!("r[{dest}]=NULL"), |end| {
format!("r[{dest}..{end}]=NULL")
}),
),
Insn::NotFound {
@@ -1599,7 +1596,7 @@ pub fn insn_to_str(
0,
Value::build_text(""),
0,
format!("roots={:?} message_register={}", roots, message_register),
format!("roots={roots:?} message_register={message_register}"),
),
Insn::RowData { cursor_id, dest } => (
"RowData",
@@ -1620,6 +1617,6 @@ pub fn insn_to_str(
p3,
p4.to_string(),
p5,
manual_comment.map_or(comment.to_string(), |mc| format!("{}; {}", comment, mc))
manual_comment.map_or(comment.to_string(), |mc| format!("{comment}; {mc}"))
)
}
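Most of this file's diff, like the bulk of the commit, is the `uninlined_format_args` auto-fix: identifiers passed as format arguments are captured inline in the format string, and any width, precision, or `?` flags stay in place after the captured name. A small self-contained sketch of the forms that appear above; the register numbers and values are made up for illustration:

fn main() {
    let (dest, lhs, rhs) = (3, 1, 2);
    let fl = 12345.678_f64;
    let sum_calls = 42;
    let maybe: Option<i32> = None;

    // Plain identifier arguments become inline captures...
    assert_eq!(format!("r[{dest}]=r[{lhs}]+r[{rhs}]"), "r[3]=r[1]+r[2]");
    // ...and formatting specs follow the captured name unchanged:
    assert_eq!(format!("{fl:.14e}"), format!("{:.14e}", fl));
    assert_eq!(format!("total {sum_calls:8}"), format!("total {:8}", sum_calls));
    // Debug formatting is inlined the same way:
    assert_eq!(format!("{maybe:?}"), "None");
}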

View File

@@ -329,9 +329,9 @@ impl ProgramState {
let cursors = self.cursors.borrow_mut();
std::cell::RefMut::map(cursors, |c| {
c.get_mut(cursor_id)
.unwrap_or_else(|| panic!("cursor id {} out of bounds", cursor_id))
.unwrap_or_else(|| panic!("cursor id {cursor_id} out of bounds"))
.as_mut()
.unwrap_or_else(|| panic!("cursor id {} is None", cursor_id))
.unwrap_or_else(|| panic!("cursor id {cursor_id} is None"))
})
}
}

View File

@@ -44,8 +44,7 @@ impl VirtualTable {
.map(|(vtab, columns)| (VirtualTableType::Pragma(vtab), columns))?
} else {
return Err(LimboError::ParseError(format!(
"No such table-valued function: {}",
name
"No such table-valued function: {name}"
)));
};
@@ -216,8 +215,7 @@ impl ExtVirtualTable {
kind: VTabKind,
) -> crate::Result<(Self, String)> {
let module = module.ok_or(LimboError::ExtensionError(format!(
"Virtual table module not found: {}",
module_name
"Virtual table module not found: {module_name}"
)))?;
if kind != module.module_kind {
let expected = match kind {
@@ -225,8 +223,7 @@ impl ExtVirtualTable {
VTabKind::TableValuedFunction => "table-valued function",
};
return Err(LimboError::ExtensionError(format!(
"{} is not a {} module",
module_name, expected
"{module_name} is not a {expected} module"
)));
}
let (schema, table_ptr) = module.implementation.create(args)?;

View File

@@ -398,7 +398,7 @@ mod tests {
fn write_csv(content: &str) -> NamedTempFile {
let mut tmp = NamedTempFile::new().expect("Failed to create temp file");
write!(tmp, "{}", content).unwrap();
write!(tmp, "{content}").unwrap();
tmp
}
@@ -681,19 +681,19 @@ mod tests {
for &val in &true_values {
let result = try_new_table(vec![
"data=id,name\n1,Alice\n2,Bob\n",
&format!("header={}", val),
&format!("header={val}"),
]);
assert!(result.is_ok(), "Expected Ok for header='{}'", val);
assert!(result.unwrap().1.header, "Expected true for '{}'", val);
assert!(result.is_ok(), "Expected Ok for header='{val}'");
assert!(result.unwrap().1.header, "Expected true for '{val}'");
}
for &val in &false_values {
let result = try_new_table(vec![
"data=id,name\n1,Alice\n2,Bob\n",
&format!("header={}", val),
&format!("header={val}"),
]);
assert!(result.is_ok(), "Expected Ok for header='{}'", val);
assert!(!result.unwrap().1.header, "Expected false for '{}'", val);
assert!(result.is_ok(), "Expected Ok for header='{val}'");
assert!(!result.unwrap().1.header, "Expected false for '{val}'");
}
}
@@ -704,7 +704,7 @@ mod tests {
for &val in &invalid_values {
let result = try_new_table(vec![
"data=id,name\n1,Alice\n2,Bob\n",
&format!("header={}", val),
&format!("header={val}"),
]);
assert!(matches!(result, Err(ResultCode::InvalidArgs)));
}
@@ -747,13 +747,10 @@ mod tests {
let quotes = ["'", "\""];
for &quote in &quotes {
let table = new_table(vec![&format!(
"data={}aa{}{}bb{}",
quote, quote, quote, quote
)]);
let table = new_table(vec![&format!("data={quote}aa{quote}{quote}bb{quote}")]);
let cursor = table.open(None).unwrap();
let rows = read_rows(cursor, 1);
assert_eq!(rows, vec![vec![cell!(format!("aa{}bb", quote))]]);
assert_eq!(rows, vec![vec![cell!(format!("aa{quote}bb"))]]);
}
}
@@ -763,13 +760,10 @@ mod tests {
for &case in &cases {
let (outer, inner) = case;
let table = new_table(vec![&format!(
"data={}aa{}{}bb{}",
outer, inner, inner, outer
)]);
let table = new_table(vec![&format!("data={outer}aa{inner}{inner}bb{outer}")]);
let cursor = table.open(None).unwrap();
let rows = read_rows(cursor, 1);
assert_eq!(rows, vec![vec![cell!(format!("aa{}{}bb", inner, inner))]]);
assert_eq!(rows, vec![vec![cell!(format!("aa{inner}{inner}bb"))]]);
}
}
@@ -786,7 +780,7 @@ mod tests {
for &val in &invalid_values {
let result = try_new_table(vec![
"data=id,name\n1,Alice\n2,Bob\n",
&format!("columns={}", val),
&format!("columns={val}"),
]);
assert!(matches!(result, Err(ResultCode::InvalidArgs)));
}

View File

@@ -59,7 +59,7 @@ impl VTabCursor for KVStoreCursor {
.first()
.and_then(|v| v.to_text())
.map(|s| s.to_string());
log::debug!("idx_str found: key_eq\n value: {:?}", key);
log::debug!("idx_str found: key_eq\n value: {key:?}");
if let Some(key) = key {
let rowid = hash_key(&key);
let store = GLOBAL_STORE.lock().unwrap();
@@ -251,7 +251,7 @@ impl VfsExtension for TestFS {
type File = TestFile;
fn open_file(&self, path: &str, flags: i32, _direct: bool) -> ExtResult<Self::File> {
let _ = env_logger::try_init();
log::debug!("opening file with testing VFS: {} flags: {}", path, flags);
log::debug!("opening file with testing VFS: {path} flags: {flags}");
let file = OpenOptions::new()
.read(true)
.write(true)
@@ -372,7 +372,7 @@ impl VTabCursor for StatsCursor {
master.close();
for tbl in tables {
// count rows for each table
if let Ok(mut count_stmt) = conn.prepare(&format!("SELECT COUNT(*) FROM {};", tbl)) {
if let Ok(mut count_stmt) = conn.prepare(&format!("SELECT COUNT(*) FROM {tbl};")) {
let count = match count_stmt.step() {
StepResult::Row => count_stmt.get_row()[0].to_integer().unwrap_or(0),
_ => 0,

View File

@@ -23,8 +23,7 @@ pub fn register_extension(input: TokenStream) -> TokenStream {
} = input_ast;
let scalar_calls = scalars.iter().map(|scalar_ident| {
let register_fn =
syn::Ident::new(&format!("register_{}", scalar_ident), scalar_ident.span());
let register_fn = syn::Ident::new(&format!("register_{scalar_ident}"), scalar_ident.span());
quote! {
{
let result = unsafe { #register_fn(api)};
@@ -36,7 +35,7 @@ pub fn register_extension(input: TokenStream) -> TokenStream {
});
let aggregate_calls = aggregates.iter().map(|agg_ident| {
let register_fn = syn::Ident::new(&format!("register_{}", agg_ident), agg_ident.span());
let register_fn = syn::Ident::new(&format!("register_{agg_ident}"), agg_ident.span());
quote! {
{
let result = unsafe{ #agg_ident::#register_fn(api)};
@@ -47,7 +46,7 @@ pub fn register_extension(input: TokenStream) -> TokenStream {
}
});
let vtab_calls = vtabs.iter().map(|vtab_ident| {
let register_fn = syn::Ident::new(&format!("register_{}", vtab_ident), vtab_ident.span());
let register_fn = syn::Ident::new(&format!("register_{vtab_ident}"), vtab_ident.span());
quote! {
{
let result = unsafe{ #vtab_ident::#register_fn(api)};
@@ -58,7 +57,7 @@ pub fn register_extension(input: TokenStream) -> TokenStream {
}
});
let vfs_calls = vfs.iter().map(|vfs_ident| {
let register_fn = syn::Ident::new(&format!("register_{}", vfs_ident), vfs_ident.span());
let register_fn = syn::Ident::new(&format!("register_{vfs_ident}"), vfs_ident.span());
quote! {
{
let result = unsafe { #register_fn(api) };
@@ -70,7 +69,7 @@ pub fn register_extension(input: TokenStream) -> TokenStream {
});
let static_vfs = vfs.iter().map(|vfs_ident| {
let static_register =
syn::Ident::new(&format!("register_static_{}", vfs_ident), vfs_ident.span());
syn::Ident::new(&format!("register_static_{vfs_ident}"), vfs_ident.span());
quote! {
{
let result = api.add_builtin_vfs(unsafe { #static_register()});

View File

@@ -87,7 +87,7 @@ fn process_payload(payload_group: Group) -> String {
match token {
TokenTree::Ident(ident) => {
if is_variable_name {
variable_name_list.push_str(&format!("{},", ident));
variable_name_list.push_str(&format!("{ident},"));
}
is_variable_name = false;
}
@@ -99,7 +99,7 @@ fn process_payload(payload_group: Group) -> String {
_ => {}
}
}
format!("{{ {} }}", variable_name_list).to_string()
format!("{{ {variable_name_list} }}").to_string()
}
/// Generates the `get_description` implementation for the processed enum.
fn generate_get_description(
@@ -112,25 +112,21 @@ fn generate_get_description(
let payload = payload.unwrap_or("".to_string());
let desc;
if let Some(description) = variant_description_map.get(&variant) {
desc = format!("Some({})", description);
desc = format!("Some({description})");
} else {
desc = "None".to_string();
}
all_enum_arms.push_str(&format!(
"{}::{} {} => {},\n",
enum_name, variant, payload, desc
));
all_enum_arms.push_str(&format!("{enum_name}::{variant} {payload} => {desc},\n"));
}
let enum_impl = format!(
"impl {} {{
"impl {enum_name} {{
pub fn get_description(&self) -> Option<&str> {{
match self {{
{}
{all_enum_arms}
}}
}}
}}",
enum_name, all_enum_arms
}}"
);
enum_impl.parse().unwrap()
}

View File

@@ -449,11 +449,8 @@ mod tests {
let predicate = Predicate::true_binary(&mut rng, &table, row);
let value = expr_to_value(&predicate.0, row, &table);
assert!(
value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
seed
value.as_ref().is_some_and(|value| value.as_bool()),
"Predicate: {predicate:#?}\nValue: {value:#?}\nSeed: {seed}"
)
}
}
@@ -478,11 +475,8 @@ mod tests {
let predicate = Predicate::false_binary(&mut rng, &table, row);
let value = expr_to_value(&predicate.0, row, &table);
assert!(
!value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
seed
!value.as_ref().is_some_and(|value| value.as_bool()),
"Predicate: {predicate:#?}\nValue: {value:#?}\nSeed: {seed}"
)
}
}
@@ -511,7 +505,7 @@ mod tests {
.map(|row| predicate.0.test(row, &table))
.reduce(|accum, curr| accum || curr)
.unwrap_or(false);
assert!(result, "Predicate: {:#?}\nSeed: {}", predicate, seed)
assert!(result, "Predicate: {predicate:#?}\nSeed: {seed}")
}
}
@@ -538,7 +532,7 @@ mod tests {
.iter()
.map(|row| predicate.0.test(row, &table))
.any(|res| !res);
assert!(result, "Predicate: {:#?}\nSeed: {}", predicate, seed)
assert!(result, "Predicate: {predicate:#?}\nSeed: {seed}")
}
}
}

View File

@@ -268,11 +268,8 @@ mod tests {
let predicate = SimplePredicate::arbitrary_from(&mut rng, (&table, row, true)).0;
let value = expr_to_value(&predicate.0, row, &table);
assert!(
value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
seed
value.as_ref().is_some_and(|value| value.as_bool()),
"Predicate: {predicate:#?}\nValue: {value:#?}\nSeed: {seed}"
)
}
}
@@ -297,11 +294,8 @@ mod tests {
let predicate = SimplePredicate::arbitrary_from(&mut rng, (&table, row, false)).0;
let value = expr_to_value(&predicate.0, row, &table);
assert!(
!value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
seed
!value.as_ref().is_some_and(|value| value.as_bool()),
"Predicate: {predicate:#?}\nValue: {value:#?}\nSeed: {seed}"
)
}
}
@@ -326,11 +320,8 @@ mod tests {
let predicate = Predicate::arbitrary_from(&mut rng, (&table, row));
let value = expr_to_value(&predicate.0, row, &table);
assert!(
value.as_ref().map_or(false, |value| value.as_bool()),
"Predicate: {:#?}\nValue: {:#?}\nSeed: {}",
predicate,
value,
seed
value.as_ref().is_some_and(|value| value.as_bool()),
"Predicate: {predicate:#?}\nValue: {value:#?}\nSeed: {seed}"
)
}
}
@@ -358,7 +349,7 @@ mod tests {
.map(|row| predicate.test(row, &table))
.reduce(|accum, curr| accum || curr)
.unwrap_or(false);
assert!(result, "Predicate: {:#?}\nSeed: {}", predicate, seed)
assert!(result, "Predicate: {predicate:#?}\nSeed: {seed}")
}
}
@@ -384,7 +375,7 @@ mod tests {
.iter()
.map(|row| predicate.test(row, &table))
.any(|res| !res);
assert!(result, "Predicate: {:#?}\nSeed: {}", predicate, seed)
assert!(result, "Predicate: {predicate:#?}\nSeed: {seed}")
}
}
}

View File

@@ -263,7 +263,7 @@ mod tests {
.map(|row| predicate.0.test(row, &table))
.reduce(|accum, curr| accum || curr)
.unwrap_or(false);
assert!(result, "Predicate: {:#?}\nSeed: {}", predicate, seed)
assert!(result, "Predicate: {predicate:#?}\nSeed: {seed}")
}
}
@@ -290,7 +290,7 @@ mod tests {
.iter()
.map(|row| predicate.0.test(row, &table))
.any(|res| !res);
assert!(result, "Predicate: {:#?}\nSeed: {}", predicate, seed)
assert!(result, "Predicate: {predicate:#?}\nSeed: {seed}")
}
}
}

View File

@@ -65,15 +65,15 @@ fn main() -> anyhow::Result<()> {
Some(SimulatorCommand::Loop { n, short_circuit }) => {
banner();
for i in 0..n {
println!("iteration {}", i);
println!("iteration {i}");
let result = testing_main(&cli_opts);
if result.is_err() && short_circuit {
println!("short circuiting after {} iterations", i);
println!("short circuiting after {i} iterations");
return result;
} else if result.is_err() {
println!("iteration {} failed", i);
println!("iteration {i} failed");
} else {
println!("iteration {} succeeded", i);
println!("iteration {i} succeeded");
}
}
Ok(())
@@ -180,7 +180,7 @@ fn testing_main(cli_opts: &SimulatorCLI) -> anyhow::Result<()> {
};
// Print the seed, the locations of the database and the plan file at the end again for easily accessing them.
println!("seed: {}", seed);
println!("seed: {seed}");
println!("path: {}", paths.base.display());
result
@@ -237,7 +237,7 @@ fn watch_mode(
}
}
}
Err(e) => println!("watch error: {:?}", e),
Err(e) => println!("watch error: {e:?}"),
}
}
@@ -557,7 +557,7 @@ impl SandboxedResult {
match result {
Ok(ExecutionResult { error: None, .. }) => SandboxedResult::Correct,
Ok(ExecutionResult { error: Some(e), .. }) => {
let error = format!("{:?}", e);
let error = format!("{e:?}");
let last_execution = last_execution.lock().unwrap();
SandboxedResult::Panicked {
error,
@@ -600,7 +600,7 @@ fn setup_simulation(
tracing::info!("seed={}", seed);
let bug = bugbase
.get_bug(seed)
.unwrap_or_else(|| panic!("bug '{}' not found in bug base", seed));
.unwrap_or_else(|| panic!("bug '{seed}' not found in bug base"));
let paths = bugbase.paths(seed);
if !paths.base.exists() {
@@ -614,7 +614,7 @@ fn setup_simulation(
let seed = *seed;
bugbase
.load_bug(seed)
.unwrap_or_else(|_| panic!("could not load bug '{}' in bug base", seed))
.unwrap_or_else(|_| panic!("could not load bug '{seed}' in bug base"))
.plan
.clone()
}
@@ -640,7 +640,7 @@ fn setup_simulation(
// Create the output directory if it doesn't exist
if !paths.base.exists() {
std::fs::create_dir_all(&paths.base)
.map_err(|e| format!("{:?}", e))
.map_err(|e| format!("{e:?}"))
.unwrap();
}
paths
@@ -731,7 +731,7 @@ fn init_logger() {
}
fn banner() {
println!("{}", BANNER);
println!("{BANNER}");
}
const BANNER: &str = r#"
@@ -771,9 +771,7 @@ fn integrity_check(db_path: &Path) -> anyhow::Result<()> {
}
if !result[0].eq_ignore_ascii_case("ok") {
// Build a list of problems
result
.iter_mut()
.for_each(|row| *row = format!("- {}", row));
result.iter_mut().for_each(|row| *row = format!("- {row}"));
anyhow::bail!("simulation failed: {}", result.join("\n"))
}
Ok(())

View File

@@ -47,7 +47,7 @@ impl std::fmt::Display for CreateIndex {
self.table_name,
self.columns
.iter()
.map(|(name, order)| format!("{} {}", name, order))
.map(|(name, order)| format!("{name} {order}"))
.collect::<Vec<String>>()
.join(", ")
)

View File

@@ -48,7 +48,7 @@ impl Display for Insert {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Insert::Values { table, values } => {
write!(f, "INSERT INTO {} VALUES ", table)?;
write!(f, "INSERT INTO {table} VALUES ")?;
for (i, row) in values.iter().enumerate() {
if i != 0 {
write!(f, ", ")?;
@@ -58,15 +58,15 @@ impl Display for Insert {
if j != 0 {
write!(f, ", ")?;
}
write!(f, "{}", value)?;
write!(f, "{value}")?;
}
write!(f, ")")?;
}
Ok(())
}
Insert::Select { table, select } => {
write!(f, "INSERT INTO {} ", table)?;
write!(f, "{}", select)
write!(f, "INSERT INTO {table} ")?;
write!(f, "{select}")
}
}
}

View File

@@ -22,7 +22,7 @@ impl Predicate {
pub(crate) fn test(&self, row: &[SimValue], table: &Table) -> bool {
let value = expr_to_value(&self.0, row, table);
value.map_or(false, |value| value.as_bool())
value.is_some_and(|value| value.as_bool())
}
}

View File

@@ -30,9 +30,9 @@ pub enum ResultColumn {
impl Display for ResultColumn {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ResultColumn::Expr(expr) => write!(f, "({})", expr),
ResultColumn::Expr(expr) => write!(f, "({expr})"),
ResultColumn::Star => write!(f, "*"),
ResultColumn::Column(name) => write!(f, "{}", name),
ResultColumn::Column(name) => write!(f, "{name}"),
}
}
}
@@ -73,8 +73,7 @@ impl Display for Select {
.join(", "),
self.table,
self.predicate,
self.limit
.map_or("".to_string(), |l| format!(" LIMIT {}", l))
self.limit.map_or("".to_string(), |l| format!(" LIMIT {l}"))
)
}
}

View File

@@ -48,7 +48,7 @@ impl Display for Update {
if i != 0 {
write!(f, ", ")?;
}
write!(f, "{} = {}", name, value)?;
write!(f, "{name} = {value}")?;
}
write!(f, " WHERE {}", self.predicate)?;
Ok(())

View File

@@ -67,7 +67,7 @@ fn float_to_string<S>(float: &f64, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&format!("{}", float))
serializer.serialize_str(&format!("{float}"))
}
fn string_to_float<'de, D>(deserializer: D) -> Result<f64, D::Error>
@@ -86,7 +86,7 @@ fn to_sqlite_blob(bytes: &[u8]) -> String {
"X'{}'",
bytes
.iter()
.fold(String::new(), |acc, b| acc + &format!("{:02X}", b))
.fold(String::new(), |acc, b| acc + &format!("{b:02X}"))
)
}
@@ -94,9 +94,9 @@ impl Display for SimValue {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.0 {
types::Value::Null => write!(f, "NULL"),
types::Value::Integer(i) => write!(f, "{}", i),
types::Value::Float(fl) => write!(f, "{}", fl),
value @ types::Value::Text(..) => write!(f, "'{}'", value),
types::Value::Integer(i) => write!(f, "{i}"),
types::Value::Float(fl) => write!(f, "{fl}"),
value @ types::Value::Text(..) => write!(f, "'{value}'"),
types::Value::Blob(b) => write!(f, "{}", to_sqlite_blob(b)),
}
}

View File

@@ -366,7 +366,7 @@ impl BugBase {
}
);
if let Some(error) = &run.error {
println!(" error: {}", error);
println!(" error: {error}");
}
}
println!(" ------------------");
@@ -384,12 +384,12 @@ impl BugBase {
/// Get the path to the database file for a given seed.
pub(crate) fn db_path(&self, seed: u64) -> PathBuf {
self.path.join(format!("{}/test.db", seed))
self.path.join(format!("{seed}/test.db"))
}
/// Get paths to all the files for a given seed.
pub(crate) fn paths(&self, seed: u64) -> Paths {
let base = self.path.join(format!("{}/", seed));
let base = self.path.join(format!("{seed}/"));
Paths::new(&base)
}
}

View File

@@ -344,7 +344,7 @@ fn execute_interaction_rusqlite(
tracing::debug!("{}", interaction);
let results = execute_query_rusqlite(conn, query).map_err(|e| {
turso_core::LimboError::InternalError(format!("error executing query: {}", e))
turso_core::LimboError::InternalError(format!("error executing query: {e}"))
});
tracing::debug!("{:?}", results);
stack.push(results);

View File

@@ -95,10 +95,7 @@ impl SimulatorEnv {
let abs_diff = (summed_total - total).abs();
if abs_diff > 0.0001 {
panic!(
"Summed total {} is not equal to total {}",
summed_total, total
);
panic!("Summed total {summed_total} is not equal to total {total}");
}
let opts = SimulatorOpts {
@@ -150,7 +147,7 @@ impl SimulatorEnv {
) {
Ok(db) => db,
Err(e) => {
panic!("error opening simulator test file {:?}: {:?}", db_path, e);
panic!("error opening simulator test file {db_path:?}: {e:?}");
}
};

View File

@@ -282,8 +282,7 @@ fn limbo_integrity_check(conn: &Arc<Connection>) -> Result<()> {
let message = result.join("\n");
if message != "ok" {
return Err(LimboError::InternalError(format!(
"Integrity Check Failed: {}",
message
"Integrity Check Failed: {message}"
)));
}
Ok(())

View File

@@ -71,7 +71,7 @@ impl SimulatorFile {
0 // No fault counter for sync
),
"--------- -------- --------".to_string(),
format!("total {:8} {:8}", sum_calls, sum_faults),
format!("total {sum_calls:8} {sum_faults:8}"),
];
stats_table.join("\n")

View File

@@ -172,7 +172,7 @@ pub(crate) fn sqlite_exec_rows(
let column: rusqlite::types::Value = match row.get(i) {
Ok(column) => column,
Err(rusqlite::Error::InvalidColumnIndex(_)) => break,
Err(err) => panic!("unexpected rusqlite error: {}", err),
Err(err) => panic!("unexpected rusqlite error: {err}"),
};
result.push(column);
}
@@ -201,8 +201,9 @@ pub(crate) fn limbo_exec_rows(
stmt.run_once().unwrap();
continue;
}
turso_core::StepResult::Done => break 'outer,
r => panic!("unexpected result {:?}: expecting single row", r),
r => panic!("unexpected result {r:?}: expecting single row"),
}
};
let row = row
@@ -234,7 +235,7 @@ pub(crate) fn limbo_exec_rows_error(
continue;
}
turso_core::StepResult::Done => return Ok(()),
r => panic!("unexpected result {:?}: expecting single row", r),
r => panic!("unexpected result {r:?}: expecting single row"),
}
}
}
@@ -297,7 +298,7 @@ mod tests {
);
let conn = db.connect_limbo();
let ret = limbo_exec_rows(&db, &conn, "CREATE table t(a)");
assert!(ret.is_empty(), "{:?}", ret);
assert!(ret.is_empty(), "{ret:?}");
limbo_exec_rows(&db, &conn, "INSERT INTO t values (1)");
conn.close().unwrap()
}
@@ -313,7 +314,7 @@ mod tests {
assert_eq!(ret, vec![vec![Value::Integer(1)]]);
let err = limbo_exec_rows_error(&db, &conn, "INSERT INTO t values (1)").unwrap_err();
assert!(matches!(err, turso_core::LimboError::ReadOnly), "{:?}", err);
assert!(matches!(err, turso_core::LimboError::ReadOnly), "{err:?}");
}
Ok(())
}

View File

@@ -57,7 +57,7 @@ fn test_last_insert_rowid_basic() -> anyhow::Result<()> {
}
},
Ok(None) => {}
Err(err) => eprintln!("{}", err),
Err(err) => eprintln!("{err}"),
};
// Check last_insert_rowid after explicit id
@@ -80,7 +80,7 @@ fn test_last_insert_rowid_basic() -> anyhow::Result<()> {
}
},
Ok(None) => {}
Err(err) => eprintln!("{}", err),
Err(err) => eprintln!("{err}"),
};
assert_eq!(last_id, 5, "Explicit insert should have rowid 5");
do_flush(&conn, &tmp_db)?;

View File

@@ -36,8 +36,7 @@ mod tests {
let sqlite_result = sqlite_exec_rows(&sqlite_conn, offending_query);
assert_eq!(
limbo_result, sqlite_result,
"query: {}, limbo: {:?}, sqlite: {:?}",
offending_query, limbo_result, sqlite_result
"query: {offending_query}, limbo: {limbo_result:?}, sqlite: {sqlite_result:?}"
);
}
@@ -55,8 +54,7 @@ mod tests {
let sqlite = sqlite_exec_rows(&sqlite_conn, query);
assert_eq!(
limbo, sqlite,
"query: {}, limbo: {:?}, sqlite: {:?}",
query, limbo, sqlite
"query: {query}, limbo: {limbo:?}, sqlite: {sqlite:?}"
);
}
}
@@ -69,7 +67,7 @@ mod tests {
let insert = format!(
"INSERT INTO t VALUES {}",
(1..100)
.map(|x| format!("({})", x))
.map(|x| format!("({x})"))
.collect::<Vec<_>>()
.join(", ")
);
@@ -104,13 +102,12 @@ mod tests {
order_by.unwrap_or("")
);
log::trace!("query: {}", query);
log::trace!("query: {query}");
let limbo_result = limbo_exec_rows(&db, &limbo_conn, &query);
let sqlite_result = sqlite_exec_rows(&sqlite_conn, &query);
assert_eq!(
limbo_result, sqlite_result,
"query: {}, limbo: {:?}, sqlite: {:?}, seed: {}",
query, limbo_result, sqlite_result, seed
"query: {query}, limbo: {limbo_result:?}, sqlite: {sqlite_result:?}, seed: {seed}"
);
}
}
@@ -673,15 +670,14 @@ mod tests {
let sqlite_conn = rusqlite::Connection::open_in_memory().unwrap();
let (mut rng, seed) = rng_from_time();
log::info!("seed: {}", seed);
log::info!("seed: {seed}");
for _ in 0..1024 {
let query = g.generate(&mut rng, sql, 50);
let limbo = limbo_exec_rows(&db, &limbo_conn, &query);
let sqlite = sqlite_exec_rows(&sqlite_conn, &query);
assert_eq!(
limbo, sqlite,
"query: {}, limbo: {:?}, sqlite: {:?} seed: {}",
query, limbo, sqlite, seed
"query: {query}, limbo: {limbo:?}, sqlite: {sqlite:?} seed: {seed}"
);
}
}
@@ -708,8 +704,7 @@ mod tests {
let sqlite = sqlite_exec_rows(&sqlite_conn, query);
assert_eq!(
limbo, sqlite,
"query: {}, limbo: {:?}, sqlite: {:?}",
query, limbo, sqlite
"query: {query}, limbo: {limbo:?}, sqlite: {sqlite:?}"
);
}
}
@@ -794,10 +789,10 @@ mod tests {
let sqlite_conn = rusqlite::Connection::open_in_memory().unwrap();
let (mut rng, seed) = rng_from_time();
log::info!("seed: {}", seed);
log::info!("seed: {seed}");
for _ in 0..1024 {
let query = g.generate(&mut rng, sql, 50);
log::info!("query: {}", query);
log::info!("query: {query}");
let limbo = limbo_exec_rows(&db, &limbo_conn, &query);
let sqlite = sqlite_exec_rows(&sqlite_conn, &query);
match (&limbo[0][0], &sqlite[0][0]) {
@@ -808,11 +803,7 @@ mod tests {
assert!(
(limbo - sqlite).abs() < 1e-9
|| (limbo - sqlite) / (limbo.abs().max(sqlite.abs())) < 1e-9,
"query: {}, limbo: {:?}, sqlite: {:?} seed: {}",
query,
limbo,
sqlite,
seed
"query: {query}, limbo: {limbo:?}, sqlite: {sqlite:?} seed: {seed}"
)
}
_ => {}
@@ -958,16 +949,15 @@ mod tests {
let sqlite_conn = rusqlite::Connection::open_in_memory().unwrap();
let (mut rng, seed) = rng_from_time();
log::info!("seed: {}", seed);
log::info!("seed: {seed}");
for _ in 0..1024 {
let query = g.generate(&mut rng, sql, 50);
log::info!("query: {}", query);
log::info!("query: {query}");
let limbo = limbo_exec_rows(&db, &limbo_conn, &query);
let sqlite = sqlite_exec_rows(&sqlite_conn, &query);
assert_eq!(
limbo, sqlite,
"query: {}, limbo: {:?}, sqlite: {:?} seed: {}",
query, limbo, sqlite, seed
"query: {query}, limbo: {limbo:?}, sqlite: {sqlite:?} seed: {seed}"
);
}
}
@@ -1328,16 +1318,15 @@ mod tests {
let sqlite_conn = rusqlite::Connection::open_in_memory().unwrap();
let (mut rng, seed) = rng_from_time();
log::info!("seed: {}", seed);
log::info!("seed: {seed}");
for _ in 0..1024 {
let query = g.generate(&mut rng, sql, 50);
log::info!("query: {}", query);
log::info!("query: {query}");
let limbo = limbo_exec_rows(&db, &limbo_conn, &query);
let sqlite = sqlite_exec_rows(&sqlite_conn, &query);
assert_eq!(
limbo, sqlite,
"query: {}, limbo: {:?}, sqlite: {:?} seed: {}",
query, limbo, sqlite, seed
"query: {query}, limbo: {limbo:?}, sqlite: {sqlite:?} seed: {seed}"
);
}
}
@@ -1366,8 +1355,7 @@ mod tests {
let sqlite = sqlite_exec_rows(&sqlite_conn, query);
assert_eq!(
limbo, sqlite,
"queries: {:?}, query: {}, limbo: {:?}, sqlite: {:?}",
queries, query, limbo, sqlite
"queries: {queries:?}, query: {query}, limbo: {limbo:?}, sqlite: {sqlite:?}"
);
}
}

View File

@@ -50,7 +50,7 @@ fn test_simple_overflow_page() -> anyhow::Result<()> {
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
eprintln!("{err}");
}
};
@@ -77,7 +77,7 @@ fn test_simple_overflow_page() -> anyhow::Result<()> {
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
eprintln!("{err}");
}
}
do_flush(&conn, &tmp_db)?;
@@ -118,7 +118,7 @@ fn test_sequential_overflow_page() -> anyhow::Result<()> {
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
eprintln!("{err}");
}
};
}
@@ -147,7 +147,7 @@ fn test_sequential_overflow_page() -> anyhow::Result<()> {
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
eprintln!("{err}");
}
}
do_flush(&conn, &tmp_db)?;
@@ -167,12 +167,12 @@ fn test_sequential_write() -> anyhow::Result<()> {
let list_query = "SELECT * FROM test";
let max_iterations = 10000;
for i in 0..max_iterations {
println!("inserting {} ", i);
println!("inserting {i} ");
if (i % 100) == 0 {
let progress = (i as f64 / max_iterations as f64) * 100.0;
println!("progress {:.1}%", progress);
println!("progress {progress:.1}%");
}
let insert_query = format!("INSERT INTO test VALUES ({})", i);
let insert_query = format!("INSERT INTO test VALUES ({i})");
run_query(&tmp_db, &conn, &insert_query)?;
let mut current_read_index = 0;
@@ -283,7 +283,7 @@ fn test_wal_checkpoint() -> anyhow::Result<()> {
let conn = tmp_db.connect_limbo();
for i in 0..iterations {
let insert_query = format!("INSERT INTO test VALUES ({})", i);
let insert_query = format!("INSERT INTO test VALUES ({i})");
do_flush(&conn, &tmp_db)?;
conn.checkpoint()?;
run_query(&tmp_db, &conn, &insert_query)?;
@@ -309,10 +309,10 @@ fn test_wal_restart() -> anyhow::Result<()> {
// threshold is 1000 by default
fn insert(i: usize, conn: &Arc<Connection>, tmp_db: &TempDatabase) -> anyhow::Result<()> {
debug!("inserting {}", i);
let insert_query = format!("INSERT INTO test VALUES ({})", i);
debug!("inserting {i}");
let insert_query = format!("INSERT INTO test VALUES ({i})");
run_query(tmp_db, conn, &insert_query)?;
debug!("inserted {}", i);
debug!("inserted {i}");
tmp_db.io.run_once()?;
Ok(())
}
@@ -324,7 +324,7 @@ fn test_wal_restart() -> anyhow::Result<()> {
run_query_on_row(tmp_db, conn, list_query, |row: &Row| {
assert!(count.is_none());
count = Some(row.get::<i64>(0).unwrap() as usize);
debug!("counted {:?}", count);
debug!("counted {count:?}");
})?;
Ok(count.unwrap())
}
@@ -372,15 +372,15 @@ fn test_write_delete_with_index() -> anyhow::Result<()> {
let list_query = "SELECT * FROM test";
let max_iterations = 1000;
for i in 0..max_iterations {
println!("inserting {} ", i);
let insert_query = format!("INSERT INTO test VALUES ({})", i);
println!("inserting {i} ");
let insert_query = format!("INSERT INTO test VALUES ({i})");
run_query(&tmp_db, &conn, &insert_query)?;
}
for i in 0..max_iterations {
println!("deleting {} ", i);
let delete_query = format!("delete from test where x={}", i);
println!("deleting {i} ");
let delete_query = format!("delete from test where x={i}");
run_query(&tmp_db, &conn, &delete_query)?;
println!("listing after deleting {} ", i);
println!("listing after deleting {i} ");
let mut current_read_index = i + 1;
run_query_on_row(&tmp_db, &conn, list_query, |row: &Row| {
let first_value = row.get::<&Value>(0).expect("missing id");
@@ -397,7 +397,7 @@ fn test_write_delete_with_index() -> anyhow::Result<()> {
run_query_on_row(
&tmp_db,
&conn,
&format!("select * from test where x = {}", i),
&format!("select * from test where x = {i}"),
|row| {
let first_value = row.get::<&Value>(0).expect("missing id");
let id = match first_value {
@@ -798,7 +798,7 @@ fn run_query_core(
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
eprintln!("{err}");
}
};
Ok(())

View File

@@ -64,7 +64,7 @@ fn test_wal_1_writer_1_reader() -> Result<()> {
let writer_thread = std::thread::spawn(move || {
let conn = tmp_db_w.connect().unwrap();
for i in 0..ROWS_WRITE {
conn.execute(format!("INSERT INTO t values({})", i).as_str())
conn.execute(format!("INSERT INTO t values({i})").as_str())
.unwrap();
let mut rows = rows_.lock().unwrap();
*rows += 1;
@@ -95,7 +95,7 @@ fn test_wal_1_writer_1_reader() -> Result<()> {
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
eprintln!("{err}");
}
}
if rows == ROWS_WRITE {