Merge with main

This commit is contained in:
Jorge Hermo
2025-01-15 22:10:35 +01:00
100 changed files with 6879 additions and 1579 deletions

View File

@@ -6,5 +6,9 @@
"pereman2": {
"name": "Pere Diaz Bou",
"email": "pere-altea@homail.com"
},
"jussisaurio": {
"name": "Jussi Saurio",
"email": "jussi.saurio@gmail.com"
}
}

View File

@@ -32,7 +32,7 @@ jobs:
uses: actions/setup-java@v3
with:
distribution: 'temurin'
java-version: '11'
java-version: '8'
- name: Run Java tests
run: make test

View File

@@ -37,7 +37,7 @@ jobs:
env:
RUST_LOG: ${{ runner.debug && 'limbo_core::storage=trace' || '' }}
run: cargo test --verbose
timeout-minutes: 5
timeout-minutes: 10
clippy:

View File

@@ -1,5 +1,61 @@
# Changelog
## 0.0.12 - 2025-01-14
### Added
**Core:**
* Improve JSON function support (Kacper Madej, Peter Sooley)
* Support nested parenthesized conditional expressions (Preston Thorpe)
* Add support for changes() and total_changes() functions (Lemon-Peppermint)
* Auto-create index in CREATE TABLE when necessary (Jussi Saurio)
* Add partial support for datetime() function (Preston Thorpe)
* SQL parser performance improvements (Jussi Saurio)
**Shell:**
* Show pretty parse errors in the shell (Samyak Sarnayak)
* Add CSV import support to shell (Vrishabh)
* Selectable IO backend with --io={syscall,io-uring} argument (Jorge López Tello)
**Bindings:**
* Initial version of Java bindings (Kim Seon Woo)
* Initial version of Rust bindings (Pekka Enberg)
* Add OPFS support to Wasm bindings (Elijah Morgan)
* Support uncorrelated FROM clause subqueries (Jussi Saurio)
* In-memory support to `sqlite3_open()` (Pekka Enberg)
### Fixed
* Make iterate() lazy in JavaScript bindings (Diego Reis)
* Fix integer overflow output to be same as sqlite3 (Vrishabh)
* Fix 8-bit serial type to encoding (Preston Thorpe)
* Query plan optimizer bug fixes (Jussi Saurio)
* B-Tree balancing fixes (Pere Diaz Bou)
* Fix wrong index seek on `SeekOp::LT`/`SeekOp::GT` (Kould)
* Fix arithmetic operations for text values (Vrishabh)
* Fix quote escape in SQL literals (Vrishabh)
## 0.0.11 - 2024-12-31
### Added

View File

@@ -120,7 +120,7 @@ Feature support of [sqlite expr syntax](https://www.sqlite.org/lang_expr.html).
| like(X,Y,Z) | Yes | |
| likelihood(X,Y) | No | |
| likely(X) | No | |
| load_extension(X) | No | |
| load_extension(X) | Yes | sqlite3 extensions not yet supported |
| load_extension(X,Y) | No | |
| lower(X) | Yes | |
| ltrim(X) | Yes | |
@@ -445,8 +445,8 @@ Feature support of [sqlite expr syntax](https://www.sqlite.org/lang_expr.html).
| SeekRowid | Yes |
| Sequence | No |
| SetCookie | No |
| ShiftLeft | No |
| ShiftRight | No |
| ShiftLeft | Yes |
| ShiftRight | Yes |
| SoftNull | Yes |
| Sort | No |
| SorterCompare | No |

View File

@@ -142,4 +142,16 @@ Once Maturin is installed, you can build the crate and install it as a Python mo
```bash
cd bindings/python && maturin develop
```
## Adding Third Party Dependencies
When you want to add third party dependencies, please follow these steps:
1. Add Licenses: Place the appropriate licenses for the third-party dependencies under the licenses directory. Ensure
that each license is in a separate file and named appropriately.
2. Update NOTICE.md: Specify the licenses for the third-party dependencies in the NOTICE.md file. Include the name of
the dependency, the license file path, and the homepage of the dependency.
By following these steps, you ensure that all third-party dependencies are properly documented and their licenses are
included in the project.

171
Cargo.lock generated
View File

@@ -331,7 +331,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -417,7 +417,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "core_tester"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"anyhow",
"clap",
@@ -665,7 +665,7 @@ checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -809,7 +809,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -1059,7 +1059,7 @@ checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
[[package]]
name = "java-limbo"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"anyhow",
"jni",
@@ -1137,6 +1137,16 @@ version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "libloading"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
]
[[package]]
name = "libmimalloc-sys"
version = "0.1.39"
@@ -1170,7 +1180,7 @@ dependencies = [
[[package]]
name = "limbo"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"anyhow",
"clap",
@@ -1186,7 +1196,7 @@ dependencies = [
[[package]]
name = "limbo-wasm"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"console_error_panic_hook",
"js-sys",
@@ -1198,7 +1208,7 @@ dependencies = [
[[package]]
name = "limbo_core"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"bumpalo",
"cfg_block",
@@ -1212,12 +1222,13 @@ dependencies = [
"jsonb",
"julian_day_converter",
"libc",
"libloading",
"limbo_extension",
"limbo_macros",
"log",
"miette",
"mimalloc",
"mockall",
"nix 0.29.0",
"pest",
"pest_derive",
"polling",
@@ -1236,13 +1247,35 @@ dependencies = [
"uuid",
]
[[package]]
name = "limbo_extension"
version = "0.0.12"
dependencies = [
"limbo_macros",
"log",
]
[[package]]
name = "limbo_libsql"
version = "0.0.12"
dependencies = [
"limbo_core",
"thiserror 2.0.9",
"tokio",
]
[[package]]
name = "limbo_macros"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "limbo_sim"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"anarchist-readable-name-generator-lib",
"clap",
@@ -1256,7 +1289,7 @@ dependencies = [
[[package]]
name = "limbo_sqlite3"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"env_logger 0.11.5",
"libc",
@@ -1264,6 +1297,15 @@ dependencies = [
"log",
]
[[package]]
name = "limbo_uuid"
version = "0.0.12"
dependencies = [
"limbo_extension",
"log",
"uuid",
]
[[package]]
name = "linux-raw-sys"
version = "0.4.14"
@@ -1338,7 +1380,7 @@ checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -1365,6 +1407,17 @@ dependencies = [
"adler2",
]
[[package]]
name = "mio"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
dependencies = [
"libc",
"wasi",
"windows-sys 0.52.0",
]
[[package]]
name = "mockall"
version = "0.13.1"
@@ -1388,7 +1441,7 @@ dependencies = [
"cfg-if",
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -1524,7 +1577,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
dependencies = [
"memchr",
"thiserror 2.0.6",
"thiserror 2.0.9",
"ucd-trie",
]
@@ -1548,7 +1601,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -1737,7 +1790,7 @@ dependencies = [
[[package]]
name = "py-limbo"
version = "0.0.11"
version = "0.0.12"
dependencies = [
"anyhow",
"limbo_core",
@@ -1794,7 +1847,7 @@ dependencies = [
"proc-macro2",
"pyo3-macros-backend",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -1807,7 +1860,7 @@ dependencies = [
"proc-macro2",
"pyo3-build-config",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -1821,9 +1874,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.37"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [
"proc-macro2",
]
@@ -1977,7 +2030,7 @@ dependencies = [
"regex",
"relative-path",
"rustc_version",
"syn 2.0.90",
"syn 2.0.96",
"unicode-ident",
]
@@ -2090,7 +2143,7 @@ checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -2129,6 +2182,15 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51bf3a9dccf2c079bf1465d449a485c85b36443caf765f2f127bfec28b180f75"
[[package]]
name = "signal-hook-registry"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1"
dependencies = [
"libc",
]
[[package]]
name = "siphasher"
version = "0.3.11"
@@ -2150,6 +2212,16 @@ version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "socket2"
version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "sqlite3-parser"
version = "0.13.0"
@@ -2249,9 +2321,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.90"
version = "2.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31"
checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
dependencies = [
"proc-macro2",
"quote",
@@ -2323,11 +2395,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.6"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47"
checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc"
dependencies = [
"thiserror-impl 2.0.6",
"thiserror-impl 2.0.9",
]
[[package]]
@@ -2338,18 +2410,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
name = "thiserror-impl"
version = "2.0.6"
version = "2.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312"
checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]
[[package]]
@@ -2362,6 +2434,35 @@ dependencies = [
"serde_json",
]
[[package]]
name = "tokio"
version = "1.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551"
dependencies = [
"backtrace",
"bytes",
"libc",
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
]
[[package]]
name = "tokio-macros"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.96",
]
[[package]]
name = "tracing"
version = "0.1.41"
@@ -2493,7 +2594,7 @@ dependencies = [
"log",
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
"wasm-bindgen-shared",
]
@@ -2528,7 +2629,7 @@ checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -2821,5 +2922,5 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.90",
"syn 2.0.96",
]

View File

@@ -5,17 +5,18 @@ resolver = "2"
members = [
"bindings/java",
"bindings/python",
"bindings/rust",
"bindings/wasm",
"cli",
"sqlite3",
"core",
"simulator",
"test", "macros",
"test", "macros", "limbo_extension", "extensions/uuid",
]
exclude = ["perf/latency/limbo"]
[workspace.package]
version = "0.0.11"
version = "0.0.12"
authors = ["the Limbo authors"]
edition = "2021"
license = "MIT"

View File

@@ -62,10 +62,15 @@ limbo-wasm:
cargo build --package limbo-wasm --target wasm32-wasi
.PHONY: limbo-wasm
test: limbo test-compat test-sqlite3 test-shell
test: limbo test-compat test-sqlite3 test-shell test-extensions
.PHONY: test
test-shell: limbo
test-extensions: limbo
cargo build --package limbo_uuid
./testing/extensions.py
.PHONY: test-extensions
test-shell: limbo
SQLITE_EXEC=$(SQLITE_EXEC) ./testing/shelltests.py
.PHONY: test-shell

30
NOTICE.md Normal file
View File

@@ -0,0 +1,30 @@
Limbo
=======
Please visit our GitHub for more information:
* https://github.com/tursodatabase/limbo
Dependencies
============
This product depends on Error Prone, distributed by the Error Prone project:
* License: licenses/bindings/java/errorprone-license.md (Apache License v2.0)
* Homepage: https://github.com/google/error-prone
This product depends on AssertJ, distributed by the AssertJ authors:
* License: licenses/bindings/java/assertj-license.md (Apache License v2.0)
* Homepage: https://joel-costigliola.github.io/assertj/
This product depends on serde, distributed by the serde-rs project:
* License: licenses/core/serde-apache-license.md (Apache License v2.0)
* License: licenses/core/serde-mit-license.md (MIT License)
* Homepage: https://github.com/serde-rs/serde
This product depends on serde_json5, distributed by the serde_json5 project:
* License: licenses/core/serde_json5-license.md (Apache License v2.0)
* Homepage: https://github.com/google/serde_json5

View File

@@ -25,14 +25,12 @@
## Features
* In-process OLTP database engine library
* Asynchronous I/O support on Linux with `io_uring`
* SQLite compatibility ([status](COMPAT.md))
* SQL dialect support
* File format support
* SQLite C API
* JavaScript/WebAssembly bindings (_wip_)
* Support for Linux, macOS, and Windows
Limbo is an in-process OLTP database engine library that has:
* **Asynchronous I/O** support on Linux with `io_uring`
* **SQLite compatibility** [[doc](COMPAT.md)] for SQL dialect, file formats, and the C API
* **Language bindings** for JavaScript/WebAssembly, Rust, Python, and Java
* **OS support** for Linux, macOS, and Windows
## Getting Started

View File

@@ -1,16 +1,28 @@
import net.ltgt.gradle.errorprone.CheckSeverity
import net.ltgt.gradle.errorprone.errorprone
plugins {
java
application
id("net.ltgt.errorprone") version "3.1.0"
}
group = "org.github.tursodatabase"
version = "0.0.1-SNAPSHOT"
java {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
repositories {
mavenCentral()
}
dependencies {
errorprone("com.uber.nullaway:nullaway:0.10.26") // maximum version which supports java 8
errorprone("com.google.errorprone:error_prone_core:2.10.0") // maximum version which supports java 8
testImplementation(platform("org.junit:junit-bom:5.10.0"))
testImplementation("org.junit.jupiter:junit-jupiter")
testImplementation("org.assertj:assertj-core:3.27.0")
@@ -30,5 +42,27 @@ application {
tasks.test {
useJUnitPlatform()
// In order to find rust built file under resources, we need to set it as system path
systemProperty("java.library.path", "${System.getProperty("java.library.path")}:$projectDir/src/test/resources/limbo/debug")
systemProperty(
"java.library.path",
"${System.getProperty("java.library.path")}:$projectDir/src/test/resources/limbo/debug"
)
}
tasks.withType<JavaCompile> {
options.errorprone {
// Let's select which checks to perform. NullAway is enough for now.
disableAllChecks = true
check("NullAway", CheckSeverity.ERROR)
option("NullAway:AnnotatedPackages", "org.github.tursodatabase")
option(
"NullAway:CustomNullableAnnotations",
"org.github.tursodatabase.annotations.Nullable,org.github.tursodatabase.annotations.SkipNullableCheck"
)
}
if (name.lowercase().contains("test")) {
options.errorprone {
disable("NullAway")
}
}
}

View File

@@ -0,0 +1,78 @@
package org.github.tursodatabase;
import org.github.tursodatabase.annotations.Nullable;
import org.github.tursodatabase.annotations.SkipNullableCheck;
import org.github.tursodatabase.jdbc4.JDBC4Connection;
import java.sql.*;
import java.util.Locale;
import java.util.Properties;
import java.util.logging.Logger;
/**
 * JDBC driver entry point for Limbo. Registers itself with {@link DriverManager}
 * when the class is loaded and accepts URLs of the form {@code jdbc:sqlite:<path>}.
 */
public class JDBC implements Driver {
    private static final String VALID_URL_PREFIX = "jdbc:sqlite:";

    static {
        try {
            DriverManager.registerDriver(new JDBC());
        } catch (Exception e) {
            // A failed registration means DriverManager.getConnection can never
            // find this driver, so record it instead of swallowing it silently.
            Logger.getLogger(JDBC.class.getName())
                    .severe("Failed to register Limbo JDBC driver: " + e);
        }
    }

    /**
     * Creates a connection to a Limbo database.
     *
     * @param url        connection URL, e.g. {@code "jdbc:sqlite:fileName"}
     * @param properties connection properties
     * @return a new connection, or {@code null} when the URL is not a Limbo URL
     *         (the JDBC contract for "not my URL")
     * @throws SQLException if the connection cannot be established
     */
    @Nullable
    public static LimboConnection createConnection(String url, Properties properties) throws SQLException {
        if (!isValidURL(url)) return null;

        url = url.trim();
        return new JDBC4Connection(url, extractAddress(url), properties);
    }

    // A URL is accepted when it is non-null and starts with "jdbc:sqlite:"
    // (compared case-insensitively, locale-independent via Locale.ROOT).
    private static boolean isValidURL(String url) {
        return url != null && url.toLowerCase(Locale.ROOT).startsWith(VALID_URL_PREFIX);
    }

    // Strips the "jdbc:sqlite:" prefix, leaving the database file path.
    private static String extractAddress(String url) {
        return url.substring(VALID_URL_PREFIX.length());
    }

    @Override
    @Nullable
    public Connection connect(String url, Properties info) throws SQLException {
        return createConnection(url, info);
    }

    @Override
    public boolean acceptsURL(String url) throws SQLException {
        return isValidURL(url);
    }

    @Override
    public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
        return LimboConfig.getDriverPropertyInfo();
    }

    @Override
    public int getMajorVersion() {
        // TODO: derive from the build version once it is wired through
        return 0;
    }

    @Override
    public int getMinorVersion() {
        // TODO: derive from the build version once it is wired through
        return 0;
    }

    @Override
    public boolean jdbcCompliant() {
        // Limbo does not (yet) pass the JDBC compliance suite.
        return false;
    }

    @Override
    @SkipNullableCheck
    public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        // TODO
        return null;
    }
}

View File

@@ -0,0 +1,51 @@
package org.github.tursodatabase;
import java.sql.DriverPropertyInfo;
import java.util.Arrays;
import java.util.Properties;
/**
* Limbo Configuration.
*/
/**
 * Limbo configuration, backed by pragma key/value pairs supplied as
 * {@link Properties}.
 */
public class LimboConfig {
    private final Properties pragma;

    public LimboConfig(Properties properties) {
        this.pragma = properties;
    }

    /**
     * Describes every known pragma as a JDBC driver property.
     *
     * @return one {@link DriverPropertyInfo} per {@link Pragma} value.
     */
    public static DriverPropertyInfo[] getDriverPropertyInfo() {
        Pragma[] pragmas = Pragma.values();
        DriverPropertyInfo[] result = new DriverPropertyInfo[pragmas.length];
        for (int i = 0; i < pragmas.length; i++) {
            Pragma pragma = pragmas[i];
            DriverPropertyInfo entry = new DriverPropertyInfo(pragma.pragmaName, null);
            entry.description = pragma.description;
            entry.choices = pragma.choices;
            entry.required = false;
            result[i] = entry;
        }
        return result;
    }

    /**
     * Returns a defensive copy of the configured pragmas.
     */
    public Properties toProperties() {
        Properties snapshot = new Properties();
        snapshot.putAll(pragma);
        return snapshot;
    }

    /** Known pragmas. Currently empty; constants will be added as support lands. */
    public enum Pragma {
        ;

        private final String pragmaName;
        private final String description;
        private final String[] choices;

        Pragma(String pragmaName, String description, String[] choices) {
            this.pragmaName = pragmaName;
            this.description = description;
            this.choices = choices;
        }

        public String getPragmaName() {
            return pragmaName;
        }
    }
}

View File

@@ -0,0 +1,104 @@
package org.github.tursodatabase;
import org.github.tursodatabase.core.AbstractDB;
import org.github.tursodatabase.core.LimboDB;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
/**
 * Base class for JDBC connections to a Limbo database. Owns the underlying
 * {@link AbstractDB} handle and provides the open/close lifecycle plus shared
 * argument checks for subclasses.
 */
public abstract class LimboConnection implements Connection {
    // Native database handle; assigned exactly once by a constructor.
    private final AbstractDB database;

    public LimboConnection(AbstractDB database) {
        this.database = database;
    }

    public LimboConnection(String url, String fileName) throws SQLException {
        // Delegates with empty connection properties.
        this(url, fileName, new Properties());
    }

    /**
     * Creates a connection to limbo database.
     *
     * @param url        e.g. "jdbc:sqlite:fileName"
     * @param fileName   path to file
     * @param properties connection properties
     *                   (NOTE(review): currently not consulted by {@code open} — confirm intended)
     * @throws SQLException if the database cannot be opened
     */
    public LimboConnection(String url, String fileName, Properties properties) throws SQLException {
        AbstractDB db = null;

        try {
            db = open(url, fileName, properties);
        } catch (Throwable t) {
            // Best-effort cleanup of a partially opened handle; the original
            // failure stays primary and the close failure is attached to it.
            try {
                if (db != null) {
                    db.close();
                }
            } catch (Throwable t2) {
                t.addSuppressed(t2);
            }

            throw t;
        }

        this.database = db;
    }

    // Loads the native library, creates the DB object, and opens the file.
    // NOTE(review): the properties argument is accepted but unused here.
    private static AbstractDB open(String url, String fileName, Properties properties) throws SQLException {
        if (fileName.isEmpty()) {
            throw new IllegalArgumentException("fileName should not be empty");
        }

        final AbstractDB database;
        try {
            LimboDB.load();
            database = LimboDB.create(url, fileName);
        } catch (Exception e) {
            // Wrap loader/creation failures in the checked JDBC exception type.
            throw new SQLException("Error opening connection", e);
        }

        database.open(0);

        return database;
    }

    // Guard used by subclasses before any operation that needs a live handle.
    protected void checkOpen() throws SQLException {
        if (isClosed()) throw new SQLException("database connection closed");
    }

    // Idempotent: closing an already-closed connection is a no-op.
    @Override
    public void close() throws SQLException {
        if (isClosed()) return;
        database.close();
    }

    @Override
    public boolean isClosed() throws SQLException {
        return database.isClosed();
    }

    // TODO: check whether this is still valid for limbo
    /**
     * Checks whether the type, concurrency, and holdability settings for a {@link ResultSet} are
     * supported by the SQLite interface. Supported settings are:
     *
     * <ul>
     *   <li>type: {@link ResultSet#TYPE_FORWARD_ONLY}
     *   <li>concurrency: {@link ResultSet#CONCUR_READ_ONLY})
     *   <li>holdability: {@link ResultSet#CLOSE_CURSORS_AT_COMMIT}
     * </ul>
     *
     * @param resultSetType the type setting.
     * @param resultSetConcurrency the concurrency setting.
     * @param resultSetHoldability the holdability setting.
     */
    protected void checkCursor(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        if (resultSetType != ResultSet.TYPE_FORWARD_ONLY)
            throw new SQLException("SQLite only supports TYPE_FORWARD_ONLY cursors");
        if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY)
            throw new SQLException("SQLite only supports CONCUR_READ_ONLY cursors");
        if (resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT)
            throw new SQLException("SQLite only supports closing cursors at commit");
    }
}

View File

@@ -0,0 +1,88 @@
package org.github.tursodatabase;
import org.github.tursodatabase.annotations.Nullable;
import org.github.tursodatabase.annotations.SkipNullableCheck;
import javax.sql.DataSource;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Properties;
import java.util.logging.Logger;
/**
* Provides {@link DataSource} API for configuring Limbo database connection.
*/
/**
 * {@link DataSource} implementation for Limbo: bundles a {@link LimboConfig}
 * with a JDBC URL and hands connection creation off to {@link JDBC}.
 */
public class LimboDataSource implements DataSource {

    private final LimboConfig limboConfig;
    private final String url;

    /**
     * Creates a datasource based on the provided configuration.
     *
     * @param limboConfig The configuration for the datasource.
     * @param url         JDBC URL of the target database.
     */
    public LimboDataSource(LimboConfig limboConfig, String url) {
        this.limboConfig = limboConfig;
        this.url = url;
    }

    @Override
    @Nullable
    public Connection getConnection() throws SQLException {
        // Anonymous connection: no credentials supplied.
        return getConnection(null, null);
    }

    @Override
    @Nullable
    public Connection getConnection(@Nullable String username, @Nullable String password) throws SQLException {
        Properties connectionProps = limboConfig.toProperties();
        if (username != null) {
            connectionProps.put("user", username);
        }
        if (password != null) {
            connectionProps.put("pass", password);
        }
        return JDBC.createConnection(url, connectionProps);
    }

    @Override
    @SkipNullableCheck
    public PrintWriter getLogWriter() throws SQLException {
        return null; // TODO
    }

    @Override
    public void setLogWriter(PrintWriter out) throws SQLException {
        // TODO
    }

    @Override
    public void setLoginTimeout(int seconds) throws SQLException {
        // TODO
    }

    @Override
    public int getLoginTimeout() throws SQLException {
        return 0; // TODO
    }

    @Override
    @SkipNullableCheck
    public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        return null; // TODO
    }

    @Override
    @SkipNullableCheck
    public <T> T unwrap(Class<T> iface) throws SQLException {
        return null; // TODO
    }

    @Override
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        return false; // TODO
    }
}

View File

@@ -1,4 +1,4 @@
package org.github.tursodatabase;
package org.github.tursodatabase.annotations;
import java.lang.annotation.ElementType;

View File

@@ -0,0 +1,18 @@
package org.github.tursodatabase.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method return value, field, or parameter as potentially {@code null}.
 * <p>
 * Consumed by the NullAway Error Prone check as a custom nullable annotation
 * (see the {@code NullAway:CustomNullableAnnotations} option in the Gradle
 * build); retained in source only, so it has no runtime footprint.
 */
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER})
public @interface Nullable {
}

View File

@@ -0,0 +1,18 @@
package org.github.tursodatabase.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marker that excludes a method, field, or parameter from NullAway's nullable
 * checks (registered via {@code NullAway:CustomNullableAnnotations} in the
 * Gradle build). Typically applied to TODO stubs that intentionally return
 * {@code null} for now. Retained in source only, so it has no runtime footprint.
 */
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER})
public @interface SkipNullableCheck {
}

View File

@@ -1,4 +1,4 @@
package org.github.tursodatabase;
package org.github.tursodatabase.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;

View File

@@ -0,0 +1,104 @@
/*
* Copyright (c) 2007 David Crawshaw <david@zentus.com>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package org.github.tursodatabase.core;
/**
 * SQLite result codes and column data-type codes, mirrored from the SQLite C
 * API so values can cross the JNI boundary unchanged.
 */
public class Codes {
    /** Successful result */
    public static final int SQLITE_OK = 0;
    /** SQL error or missing database */
    public static final int SQLITE_ERROR = 1;
    /** An internal logic error in SQLite */
    public static final int SQLITE_INTERNAL = 2;
    /** Access permission denied */
    public static final int SQLITE_PERM = 3;
    /** Callback routine requested an abort */
    public static final int SQLITE_ABORT = 4;
    /** The database file is locked */
    public static final int SQLITE_BUSY = 5;
    /** A table in the database is locked */
    public static final int SQLITE_LOCKED = 6;
    /** A malloc() failed */
    public static final int SQLITE_NOMEM = 7;
    /** Attempt to write a readonly database */
    public static final int SQLITE_READONLY = 8;
    /** Operation terminated by sqlite_interrupt() */
    public static final int SQLITE_INTERRUPT = 9;
    /** Some kind of disk I/O error occurred */
    public static final int SQLITE_IOERR = 10;
    /** The database disk image is malformed */
    public static final int SQLITE_CORRUPT = 11;
    /** (Internal Only) Table or record not found */
    public static final int SQLITE_NOTFOUND = 12;
    /** Insertion failed because database is full */
    public static final int SQLITE_FULL = 13;
    /** Unable to open the database file */
    public static final int SQLITE_CANTOPEN = 14;
    /** Database lock protocol error */
    public static final int SQLITE_PROTOCOL = 15;
    /** (Internal Only) Database table is empty */
    public static final int SQLITE_EMPTY = 16;
    /** The database schema changed */
    public static final int SQLITE_SCHEMA = 17;
    /** Too much data for one row of a table */
    public static final int SQLITE_TOOBIG = 18;
    /** Abort due to constraint violation */
    public static final int SQLITE_CONSTRAINT = 19;
    /** Data type mismatch */
    public static final int SQLITE_MISMATCH = 20;
    /** Library used incorrectly */
    public static final int SQLITE_MISUSE = 21;
    /** Uses OS features not supported on host */
    public static final int SQLITE_NOLFS = 22;
    /** Authorization denied */
    public static final int SQLITE_AUTH = 23;
    /** sqlite_step() has another row ready */
    public static final int SQLITE_ROW = 100;
    /** sqlite_step() has finished executing */
    public static final int SQLITE_DONE = 101;

    // types returned by sqlite3_column_type()
    public static final int SQLITE_INTEGER = 1;
    public static final int SQLITE_FLOAT = 2;
    public static final int SQLITE_TEXT = 3;
    public static final int SQLITE_BLOB = 4;
    public static final int SQLITE_NULL = 5;
}

View File

@@ -1,5 +1,26 @@
package org.github.tursodatabase.core;
// TODO: add fields and methods
public class CoreStatement {
import org.github.tursodatabase.LimboConnection;
import java.sql.SQLException;
public abstract class CoreStatement {
private final LimboConnection connection;
protected CoreStatement(LimboConnection connection) {
this.connection = connection;
}
protected void internalClose() throws SQLException {
// TODO
}
protected void clearGeneratedKeys() throws SQLException {
// TODO
}
protected void updateGeneratedKeys() throws SQLException {
// TODO
}
}

View File

@@ -2,8 +2,9 @@ package org.github.tursodatabase.core;
import org.github.tursodatabase.LimboErrorCode;
import org.github.tursodatabase.NativeInvocation;
import org.github.tursodatabase.VisibleForTesting;
import org.github.tursodatabase.annotations.NativeInvocation;
import org.github.tursodatabase.annotations.VisibleForTesting;
import org.github.tursodatabase.annotations.Nullable;
import org.github.tursodatabase.exceptions.LimboException;
import java.nio.charset.StandardCharsets;
@@ -30,6 +31,19 @@ public final class LimboDB extends AbstractDB {
}
}
/**
* Loads the SQLite interface backend.
*/
public static void load() {
if (isLoaded) return;
try {
System.loadLibrary("_limbo_java");
} finally {
isLoaded = true;
}
}
/**
* @param url e.g. "jdbc:sqlite:fileName"
* @param fileName e.g. path to file
@@ -43,19 +57,6 @@ public final class LimboDB extends AbstractDB {
super(url, fileName);
}
/**
* Loads the SQLite interface backend.
*/
public void load() {
if (isLoaded) return;
try {
System.loadLibrary("_limbo_java");
} finally {
isLoaded = true;
}
}
// WRAPPER FUNCTIONS ////////////////////////////////////////////
// TODO: add support for JNI
@@ -82,9 +83,15 @@ public final class LimboDB extends AbstractDB {
@Override
protected void open0(String fileName, int openFlags) throws SQLException {
if (isOpen) {
throwLimboException(LimboErrorCode.UNKNOWN_ERROR.code, "Already opened");
throw buildLimboException(LimboErrorCode.ETC.code, "Already opened");
}
dbPtr = openUtf8(stringToUtf8ByteArray(fileName), openFlags);
byte[] fileNameBytes = stringToUtf8ByteArray(fileName);
if (fileNameBytes == null) {
throw buildLimboException(LimboErrorCode.ETC.code, "File name cannot be converted to byteArray. File name: " + fileName);
}
dbPtr = openUtf8(fileNameBytes, openFlags);
isOpen = true;
}
@@ -114,7 +121,7 @@ public final class LimboDB extends AbstractDB {
@NativeInvocation
private void throwLimboException(int errorCode, byte[] errorMessageBytes) throws SQLException {
String errorMessage = utf8ByteBufferToString(errorMessageBytes);
throwLimboException(errorCode, errorMessage);
throw buildLimboException(errorCode, errorMessage);
}
/**
@@ -123,7 +130,7 @@ public final class LimboDB extends AbstractDB {
* @param errorCode Error code.
* @param errorMessage Error message.
*/
public void throwLimboException(int errorCode, String errorMessage) throws SQLException {
public LimboException buildLimboException(int errorCode, @Nullable String errorMessage) throws SQLException {
LimboErrorCode code = LimboErrorCode.getErrorCode(errorCode);
String msg;
if (code == LimboErrorCode.UNKNOWN_ERROR) {
@@ -132,10 +139,11 @@ public final class LimboDB extends AbstractDB {
msg = String.format("%s (%s)", code, errorMessage);
}
throw new LimboException(msg, code);
return new LimboException(msg, code);
}
private static String utf8ByteBufferToString(byte[] buffer) {
@Nullable
private static String utf8ByteBufferToString(@Nullable byte[] buffer) {
if (buffer == null) {
return null;
}
@@ -143,7 +151,8 @@ public final class LimboDB extends AbstractDB {
return new String(buffer, StandardCharsets.UTF_8);
}
private static byte[] stringToUtf8ByteArray(String str) {
@Nullable
private static byte[] stringToUtf8ByteArray(@Nullable String str) {
if (str == null) {
return null;
}

View File

@@ -0,0 +1,349 @@
package org.github.tursodatabase.jdbc4;
import org.github.tursodatabase.LimboConnection;
import org.github.tursodatabase.annotations.SkipNullableCheck;
import java.sql.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
 * JDBC 4 implementation of {@link Connection} backed by a Limbo database.
 *
 * <p>Only statement creation is wired up so far; the remaining methods are
 * placeholders (marked TODO) returning neutral values.
 */
public class JDBC4Connection extends LimboConnection {

    public JDBC4Connection(String url, String fileName, Properties properties) throws SQLException {
        super(url, fileName, properties);
    }

    @Override
    public Statement createStatement() throws SQLException {
        // Defaults mandated by the JDBC specification for plain statements.
        return createStatement(
                ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY,
                ResultSet.CLOSE_CURSORS_AT_COMMIT
        );
    }

    @Override
    public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException {
        return createStatement(resultSetType, resultSetConcurrency, ResultSet.CLOSE_CURSORS_AT_COMMIT);
    }

    @Override
    public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        checkOpen();
        checkCursor(resultSetType, resultSetConcurrency, resultSetHoldability);
        // Fix: forward the validated cursor attributes instead of dropping them.
        // Previously this constructed `new JDBC4Statement(this)`, so the statement
        // always reported the default type/concurrency/holdability regardless of
        // what the caller requested (and what checkCursor validated).
        return new JDBC4Statement(this, resultSetType, resultSetConcurrency, resultSetHoldability);
    }

    @Override
    @SkipNullableCheck
    public PreparedStatement prepareStatement(String sql) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public CallableStatement prepareCall(String sql) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public String nativeSQL(String sql) throws SQLException {
        // TODO
        return "";
    }

    @Override
    public void setAutoCommit(boolean autoCommit) throws SQLException {
        // TODO
    }

    @Override
    public boolean getAutoCommit() throws SQLException {
        // TODO
        return false;
    }

    @Override
    public void commit() throws SQLException {
        // TODO
    }

    @Override
    public void rollback() throws SQLException {
        // TODO
    }

    @Override
    public void close() throws SQLException {
        // TODO
    }

    @Override
    public boolean isClosed() throws SQLException {
        // TODO
        return false;
    }

    @Override
    @SkipNullableCheck
    public DatabaseMetaData getMetaData() throws SQLException {
        // TODO
        return null;
    }

    @Override
    public void setReadOnly(boolean readOnly) throws SQLException {
        // TODO
    }

    @Override
    public boolean isReadOnly() throws SQLException {
        // TODO
        return false;
    }

    @Override
    public void setCatalog(String catalog) throws SQLException {
        // TODO
    }

    @Override
    public String getCatalog() throws SQLException {
        // TODO
        return "";
    }

    @Override
    public void setTransactionIsolation(int level) throws SQLException {
        // TODO
    }

    @Override
    public int getTransactionIsolation() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    @SkipNullableCheck
    public SQLWarning getWarnings() throws SQLException {
        // TODO
        return null;
    }

    @Override
    public void clearWarnings() throws SQLException {
        // TODO
    }

    @Override
    @SkipNullableCheck
    public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
        // TODO
        return null;
    }

    @Override
    public Map<String, Class<?>> getTypeMap() throws SQLException {
        // TODO
        return new HashMap<>();
    }

    @Override
    public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
        // TODO
    }

    @Override
    public void setHoldability(int holdability) throws SQLException {
        // TODO
    }

    @Override
    public int getHoldability() throws SQLException {
        return 0;
    }

    @Override
    @SkipNullableCheck
    public Savepoint setSavepoint() throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public Savepoint setSavepoint(String name) throws SQLException {
        // TODO
        return null;
    }

    @Override
    public void rollback(Savepoint savepoint) throws SQLException {
        // TODO
    }

    @Override
    public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        // TODO
    }

    @Override
    @SkipNullableCheck
    public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public Clob createClob() throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public Blob createBlob() throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public NClob createNClob() throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public SQLXML createSQLXML() throws SQLException {
        // TODO
        return null;
    }

    @Override
    public boolean isValid(int timeout) throws SQLException {
        // TODO
        return false;
    }

    @Override
    public void setClientInfo(String name, String value) throws SQLClientInfoException {
        // TODO
    }

    @Override
    public void setClientInfo(Properties properties) throws SQLClientInfoException {
        // TODO
    }

    @Override
    public String getClientInfo(String name) throws SQLException {
        // TODO
        return "";
    }

    @Override
    @SkipNullableCheck
    public Properties getClientInfo() throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
        // TODO
        return null;
    }

    @Override
    @SkipNullableCheck
    public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
        // TODO
        return null;
    }

    @Override
    public void setSchema(String schema) throws SQLException {
        // TODO
    }

    @Override
    @SkipNullableCheck
    public String getSchema() throws SQLException {
        // TODO
        return "";
    }

    @Override
    public void abort(Executor executor) throws SQLException {
        // TODO
    }

    @Override
    public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
        // TODO
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    @SkipNullableCheck
    public <T> T unwrap(Class<T> iface) throws SQLException {
        return null;
    }

    @Override
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        // TODO
        return false;
    }
}

View File

@@ -0,0 +1,290 @@
package org.github.tursodatabase.jdbc4;
import org.github.tursodatabase.LimboConnection;
import org.github.tursodatabase.annotations.SkipNullableCheck;
import org.github.tursodatabase.core.CoreStatement;
import java.sql.*;
/**
 * Implementation of the {@link Statement} interface for JDBC 4.
 *
 * <p>Execution methods are not implemented yet (marked TODO). Lifecycle state
 * (closed / close-on-completion) and the result-set characteristics supplied at
 * construction time are tracked and reported through the corresponding getters.
 */
public class JDBC4Statement extends CoreStatement implements Statement {

    // Lifecycle flags; `closed` is the single source of truth for isClosed().
    private boolean closed;
    private boolean closeOnCompletion;

    // Cursor characteristics reported by the getters below.
    private final int resultSetType;
    private final int resultSetConcurrency;
    private final int resultSetHoldability;

    public JDBC4Statement(LimboConnection connection) {
        this(connection, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT);
    }

    public JDBC4Statement(LimboConnection connection, int resultSetType, int resultSetConcurrency, int resultSetHoldability) {
        super(connection);
        this.resultSetType = resultSetType;
        this.resultSetConcurrency = resultSetConcurrency;
        this.resultSetHoldability = resultSetHoldability;
    }

    @Override
    @SkipNullableCheck
    public ResultSet executeQuery(String sql) throws SQLException {
        // TODO
        return null;
    }

    @Override
    public int executeUpdate(String sql) throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public void close() throws SQLException {
        // Per the JDBC spec, closing an already-closed Statement is a no-op;
        // the guard also prevents releasing native resources twice.
        if (closed) {
            return;
        }
        clearGeneratedKeys();
        internalClose();
        closed = true;
    }

    @Override
    public int getMaxFieldSize() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public void setMaxFieldSize(int max) throws SQLException {
        // TODO
    }

    @Override
    public int getMaxRows() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public void setMaxRows(int max) throws SQLException {
        // TODO
    }

    @Override
    public void setEscapeProcessing(boolean enable) throws SQLException {
        // TODO
    }

    @Override
    public int getQueryTimeout() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public void setQueryTimeout(int seconds) throws SQLException {
        // TODO
    }

    @Override
    public void cancel() throws SQLException {
        // TODO
    }

    @Override
    @SkipNullableCheck
    public SQLWarning getWarnings() throws SQLException {
        // TODO
        return null;
    }

    @Override
    public void clearWarnings() throws SQLException {
        // TODO
    }

    @Override
    public void setCursorName(String name) throws SQLException {
        // TODO
    }

    @Override
    public boolean execute(String sql) throws SQLException {
        // TODO
        return false;
    }

    @Override
    @SkipNullableCheck
    public ResultSet getResultSet() throws SQLException {
        // TODO
        return null;
    }

    @Override
    public int getUpdateCount() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public boolean getMoreResults() throws SQLException {
        // TODO
        return false;
    }

    @Override
    public void setFetchDirection(int direction) throws SQLException {
        // TODO
    }

    @Override
    public int getFetchDirection() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public void setFetchSize(int rows) throws SQLException {
        // TODO
    }

    @Override
    public int getFetchSize() throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public int getResultSetConcurrency() {
        return resultSetConcurrency;
    }

    @Override
    public int getResultSetType() {
        return resultSetType;
    }

    @Override
    public void addBatch(String sql) throws SQLException {
        // TODO
    }

    @Override
    public void clearBatch() throws SQLException {
        // TODO
    }

    @Override
    public int[] executeBatch() throws SQLException {
        // TODO
        return new int[0];
    }

    @Override
    @SkipNullableCheck
    public Connection getConnection() throws SQLException {
        // TODO
        return null;
    }

    @Override
    public boolean getMoreResults(int current) throws SQLException {
        // TODO
        return false;
    }

    @Override
    @SkipNullableCheck
    public ResultSet getGeneratedKeys() throws SQLException {
        // TODO
        return null;
    }

    @Override
    public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public int executeUpdate(String sql, String[] columnNames) throws SQLException {
        // TODO
        return 0;
    }

    @Override
    public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
        // TODO
        return false;
    }

    @Override
    public boolean execute(String sql, int[] columnIndexes) throws SQLException {
        // TODO
        return false;
    }

    @Override
    public boolean execute(String sql, String[] columnNames) throws SQLException {
        // TODO
        return false;
    }

    @Override
    public int getResultSetHoldability() {
        return resultSetHoldability;
    }

    @Override
    public boolean isClosed() throws SQLException {
        // Fix: report the tracked lifecycle flag instead of a hard-coded false.
        // close() set `closed = true` but this method previously always returned
        // false, contradicting closeOnCompletion()'s closed-statement checks.
        return closed;
    }

    @Override
    public void setPoolable(boolean poolable) throws SQLException {
        // TODO
    }

    @Override
    public boolean isPoolable() throws SQLException {
        // TODO
        return false;
    }

    @Override
    public void closeOnCompletion() throws SQLException {
        if (closed) throw new SQLException("statement is closed");
        closeOnCompletion = true;
    }

    /**
     * Indicates whether the statement should be closed automatically when all its dependent result sets are closed.
     */
    @Override
    public boolean isCloseOnCompletion() throws SQLException {
        if (closed) throw new SQLException("statement is closed");
        return closeOnCompletion;
    }

    @Override
    @SkipNullableCheck
    public <T> T unwrap(Class<T> iface) throws SQLException {
        // TODO
        return null;
    }

    @Override
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        // TODO
        return false;
    }
}

View File

@@ -0,0 +1 @@
org.github.tursodatabase.JDBC

View File

@@ -0,0 +1,33 @@
package org.github.tursodatabase;
import org.junit.jupiter.api.Test;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
class JDBCTest {

    // The JDBC contract requires a driver to return null (not throw) for URLs it
    // does not accept, so DriverManager can fall through to other drivers.
    @Test
    void null_is_returned_when_invalid_url_is_passed() throws Exception {
        LimboConnection connection = JDBC.createConnection("jdbc:invalid:xxx", new Properties());
        assertThat(connection).isNull();
    }

    // A "jdbc:sqlite:<path>" URL pointing at a fresh temp file should connect.
    @Test
    void non_null_connection_is_returned_when_valid_url_is_passed() throws Exception {
        String fileUrl = TestUtils.createTempFile();
        LimboConnection connection = JDBC.createConnection("jdbc:sqlite:" + fileUrl, new Properties());
        assertThat(connection).isNotNull();
    }

    // End-to-end check that the driver is registered with DriverManager.
    // NOTE(review): creates "sample.db" in the working directory — consider a temp file.
    @Test
    void connection_can_be_retrieved_from_DriverManager() throws SQLException {
        try (Connection connection = DriverManager.getConnection("jdbc:sqlite:sample.db")) {
            assertThat(connection).isNotNull();
        }
    }
}

View File

@@ -15,16 +15,16 @@ public class LimboDBTest {
@Test
void db_should_open_normally() throws Exception {
String dbPath = TestUtils.createTempFile();
LimboDB.load();
LimboDB db = LimboDB.create("jdbc:sqlite" + dbPath, dbPath);
db.load();
db.open(0);
}
@Test
void should_throw_exception_when_opened_twice() throws Exception {
String dbPath = TestUtils.createTempFile();
LimboDB.load();
LimboDB db = LimboDB.create("jdbc:sqlite:" + dbPath, dbPath);
db.load();
db.open(0);
assertThatThrownBy(() -> db.open(0)).isInstanceOf(SQLException.class);
@@ -33,8 +33,8 @@ public class LimboDBTest {
@Test
void throwJavaException_should_throw_appropriate_java_exception() throws Exception {
String dbPath = TestUtils.createTempFile();
LimboDB.load();
LimboDB db = LimboDB.create("jdbc:sqlite:" + dbPath, dbPath);
db.load();
final int limboExceptionCode = LimboErrorCode.ETC.code;
try {

View File

@@ -0,0 +1,58 @@
package org.github.tursodatabase.jdbc4;
import org.github.tursodatabase.TestUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import static org.junit.jupiter.api.Assertions.*;
import static org.junit.jupiter.api.Assertions.assertThrows;
class JDBC4ConnectionTest {

    private JDBC4Connection connection;

    // Each test gets a fresh connection over a brand-new temp database file.
    @BeforeEach
    void setUp() throws Exception {
        String fileUrl = TestUtils.createTempFile();
        String url = "jdbc:sqlite:" + fileUrl;
        connection = new JDBC4Connection(url, fileUrl, new Properties());
    }

    // No-arg createStatement() must use the JDBC-mandated defaults.
    @Test
    void test_create_statement_valid() throws SQLException {
        Statement stmt = connection.createStatement();
        assertNotNull(stmt);
        assertEquals(ResultSet.TYPE_FORWARD_ONLY, stmt.getResultSetType());
        assertEquals(ResultSet.CONCUR_READ_ONLY, stmt.getResultSetConcurrency());
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, stmt.getResultSetHoldability());
    }

    // Two-arg overload: requested type/concurrency must be reflected by the statement.
    @Test
    void test_create_statement_with_type_and_concurrency_valid() throws SQLException {
        Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        assertNotNull(stmt);
        assertEquals(ResultSet.TYPE_FORWARD_ONLY, stmt.getResultSetType());
        assertEquals(ResultSet.CONCUR_READ_ONLY, stmt.getResultSetConcurrency());
    }

    // Three-arg overload: all requested cursor attributes must be reflected.
    @Test
    void test_create_statement_with_all_params_valid() throws SQLException {
        Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT);
        assertNotNull(stmt);
        assertEquals(ResultSet.TYPE_FORWARD_ONLY, stmt.getResultSetType());
        assertEquals(ResultSet.CONCUR_READ_ONLY, stmt.getResultSetConcurrency());
        assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, stmt.getResultSetHoldability());
    }

    // An unknown holdability constant must be rejected by checkCursor.
    @Test
    void test_create_statement_invalid() {
        assertThrows(SQLException.class, () -> {
            connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, -1);
        });
    }
}

16
bindings/rust/Cargo.toml Normal file
View File

@@ -0,0 +1,16 @@
# Copyright 2025 the Limbo authors. All rights reserved. MIT license.
[package]
name = "limbo_libsql"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
[dependencies]
limbo_core = { path = "../../core" }
thiserror = "2.0.9"
[dev-dependencies]
tokio = { version = "1.29.1", features = ["full"] }

128
bindings/rust/src/lib.rs Normal file
View File

@@ -0,0 +1,128 @@
pub mod params;
mod value;
pub use params::params_from_iter;
use crate::params::*;
use crate::value::*;
use std::rc::Rc;
use std::sync::Arc;
/// Error type exposed by the Rust bindings.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// A Rust value could not be converted into a SQL `Value`.
    #[error("SQL conversion failure: `{0}`")]
    ToSqlConversionFailure(crate::BoxError),
}

impl From<limbo_core::LimboError> for Error {
    // Stub: mapping from core errors is not implemented yet, so propagating a
    // core error with `?` currently panics via `todo!`.
    fn from(_err: limbo_core::LimboError) -> Self {
        todo!();
    }
}

// Boxed dynamic error used as the payload of conversion failures.
pub(crate) type BoxError = Box<dyn std::error::Error + Send + Sync>;

/// Convenience result alias used throughout the bindings.
pub type Result<T> = std::result::Result<T, Error>;
/// Builder for opening a [`Database`], mirroring the libsql builder API.
pub struct Builder {
    // Filesystem path, or ":memory:" for an in-memory database.
    path: String,
}

impl Builder {
    /// Creates a builder for a local database at `path`.
    pub fn new_local(path: &str) -> Self {
        Self {
            path: path.to_string(),
        }
    }

    /// Opens the database described by this builder.
    ///
    /// Only `":memory:"` is supported so far; any other path panics via `todo!`.
    #[allow(unused_variables, clippy::arc_with_non_send_sync)]
    pub async fn build(self) -> Result<Database> {
        match self.path.as_str() {
            ":memory:" => {
                let io: Arc<dyn limbo_core::IO> = Arc::new(limbo_core::MemoryIO::new()?);
                let db = limbo_core::Database::open_file(io, self.path.as_str())?;
                Ok(Database { inner: db })
            }
            // On-disk databases are not wired up yet.
            _ => todo!(),
        }
    }
}
/// Handle to an open Limbo database.
pub struct Database {
    inner: Arc<limbo_core::Database>,
}

impl Database {
    /// Opens a new connection to this database, consuming the handle.
    pub fn connect(self) -> Result<Connection> {
        Ok(Connection {
            inner: self.inner.connect(),
        })
    }
}
/// A connection to a [`Database`].
pub struct Connection {
    inner: Rc<limbo_core::Connection>,
}

impl Connection {
    /// Prepares `sql`, binds `params`, and returns the resulting rows.
    pub async fn query(&self, sql: &str, params: impl IntoParams) -> Result<Rows> {
        self.prepare(sql).await?.query(params).await
    }

    /// Prepares `sql`, binds `params`, and executes it, returning the number
    /// of affected rows.
    pub async fn execute(&self, sql: &str, params: impl IntoParams) -> Result<u64> {
        self.prepare(sql).await?.execute(params).await
    }

    /// Compiles `sql` into a reusable [`Statement`].
    pub async fn prepare(&self, sql: &str) -> Result<Statement> {
        Ok(Statement {
            _inner: Rc::new(self.inner.prepare(sql)?),
        })
    }
}
/// A prepared statement (execution is not implemented yet).
pub struct Statement {
    _inner: Rc<limbo_core::Statement>,
}

impl Statement {
    /// Binds `params` and runs the query; currently panics via `todo!` after
    /// converting the parameters.
    pub async fn query(&mut self, params: impl IntoParams) -> Result<Rows> {
        let _params = params.into_params()?;
        todo!();
    }

    /// Binds `params` and executes the statement; currently panics via `todo!`.
    pub async fn execute(&mut self, params: impl IntoParams) -> Result<u64> {
        let _params = params.into_params()?;
        todo!();
    }
}
// NOTE(review): `IntoValue` and `Params` below duplicate the definitions in
// `params.rs`; these local items shadow the glob import from `crate::params::*`,
// so `crate::Params` and `crate::params::Params` are distinct types — consider
// removing one copy.
pub trait IntoValue {
    fn into_value(self) -> Result<Value>;
}

#[derive(Debug, Clone)]
pub enum Params {
    None,
    Positional(Vec<Value>),
    Named(Vec<(String, Value)>),
}

/// Placeholder for transaction support (not implemented yet).
pub struct Transaction {}
/// Stream of result rows (iteration not implemented yet).
pub struct Rows {
    _inner: Rc<limbo_core::Rows>,
}

impl Rows {
    /// Advances to the next row; `None` signals exhaustion. Currently `todo!`.
    pub async fn next(&mut self) -> Result<Option<Row>> {
        todo!();
    }
}

/// A single result row (column access not implemented yet).
pub struct Row {}

impl Row {
    /// Returns the value of the column at `_index`. Currently `todo!`.
    pub fn get_value(&self, _index: usize) -> Result<Value> {
        todo!();
    }
}

313
bindings/rust/src/params.rs Normal file
View File

@@ -0,0 +1,313 @@
//! This module contains all `Param` related utilities and traits.
use crate::{Error, Result, Value};
mod sealed {
    /// Marker trait restricting [`super::IntoParams`] to in-crate implementations.
    pub trait Sealed {}
}
use sealed::Sealed;

/// Converts some type into parameters that can be passed
/// to libsql.
///
/// The trait is sealed and not designed to be implemented by hand
/// but instead provides a few ways to use it.
///
/// # Passing parameters to libsql
///
/// Many functions in this library let you pass parameters to libsql. Doing this
/// lets you avoid any risk of SQL injection, and is simpler than escaping
/// things manually. These functions generally contain some parameter that generically
/// accepts some implementation of this trait.
///
/// # Positional parameters
///
/// These can be supplied in a few ways:
///
/// - For heterogeneous parameter lists of 16 or less items a tuple syntax is supported
/// by doing `(1, "foo")`.
/// - For heterogeneous parameter lists of 16 or greater, the [`limbo_libsql::params!`] is supported
/// by doing `limbo_libsql::params![1, "foo"]`.
/// - For homogeneous parameter types (where they are all the same type), const arrays are
/// supported by doing `[1, 2, 3]`.
///
/// # Example (positional)
///
/// ```rust,no_run
/// # use limbo_libsql::{Connection, params};
/// # async fn run(conn: Connection) -> limbo_libsql::Result<()> {
/// let mut stmt = conn.prepare("INSERT INTO test (a, b) VALUES (?1, ?2)").await?;
///
/// // Using a tuple:
/// stmt.execute((0, "foobar")).await?;
///
/// // Using `limbo_libsql::params!`:
/// stmt.execute(params![1i32, "blah"]).await?;
///
/// // array literal — non-references
/// stmt.execute([2i32, 3i32]).await?;
///
/// // array literal — references
/// stmt.execute(["foo", "bar"]).await?;
///
/// // Slice literal, references:
/// stmt.execute([2i32, 3i32]).await?;
///
/// # Ok(())
/// # }
/// ```
///
/// # Named parameters
///
/// - For heterogeneous parameter lists of 16 or less items a tuple syntax is supported
/// by doing `(("key1", 1), ("key2", "foo"))`.
/// - For heterogeneous parameter lists of 16 or greater, the [`limbo_libsql::named_params!`] is supported
/// by doing `limbo_libsql::named_params!["key1": 1, "key2": "foo"]`.
/// - For homogeneous parameter types (where they are all the same type), const arrays are
/// supported by doing `[("key1", 1), ("key2", 2), ("key3", 3)]`.
///
/// # Example (named)
///
/// ```rust,no_run
/// # use limbo_libsql::{Connection, named_params};
/// # async fn run(conn: Connection) -> limbo_libsql::Result<()> {
/// let mut stmt = conn.prepare("INSERT INTO test (a, b) VALUES (:key1, :key2)").await?;
///
/// // Using a tuple:
/// stmt.execute(((":key1", 0), (":key2", "foobar"))).await?;
///
/// // Using `limbo_libsql::named_params!`:
/// stmt.execute(named_params! {":key1": 1i32, ":key2": "blah" }).await?;
///
/// // const array:
/// stmt.execute([(":key1", 2i32), (":key2", 3i32)]).await?;
///
/// # Ok(())
/// # }
/// ```
pub trait IntoParams: Sealed {
    // Hidden because users should not implement this themselves;
    // the `Sealed` supertrait enforces that.
    #[doc(hidden)]
    fn into_params(self) -> Result<Params>;
}
/// Fully-converted parameter set: none, positional, or named.
#[derive(Debug, Clone)]
#[doc(hidden)]
pub enum Params {
    None,
    Positional(Vec<Value>),
    Named(Vec<(String, Value)>),
}
/// Convert an owned iterator into Params.
///
/// # Example
///
/// ```rust
/// # use limbo_libsql::{Connection, params_from_iter, Rows};
/// # async fn run(conn: &Connection) {
///
/// let iter = vec![1, 2, 3];
///
/// conn.query(
///     "SELECT * FROM users WHERE id IN (?1, ?2, ?3)",
///     params_from_iter(iter)
/// )
/// .await
/// .unwrap();
/// # }
/// ```
pub fn params_from_iter<I>(iter: I) -> impl IntoParams
where
    I: IntoIterator,
    I::Item: IntoValue,
{
    // `Vec<T: IntoValue>` already implements `IntoParams` (positional).
    iter.into_iter().collect::<Vec<_>>()
}
// `()` means "no parameters at all".
impl Sealed for () {}
impl IntoParams for () {
    fn into_params(self) -> Result<Params> {
        Ok(Params::None)
    }
}

// Identity: a pre-built `Params` passes through unchanged.
impl Sealed for Params {}
impl IntoParams for Params {
    fn into_params(self) -> Result<Params> {
        Ok(self)
    }
}

// Homogeneous positional parameters.
impl<T: IntoValue> Sealed for Vec<T> {}
impl<T: IntoValue> IntoParams for Vec<T> {
    fn into_params(self) -> Result<Params> {
        let values = self
            .into_iter()
            .map(|i| i.into_value())
            .collect::<Result<Vec<_>>>()?;
        Ok(Params::Positional(values))
    }
}

// Homogeneous named parameters.
impl<T: IntoValue> Sealed for Vec<(String, T)> {}
impl<T: IntoValue> IntoParams for Vec<(String, T)> {
    fn into_params(self) -> Result<Params> {
        let values = self
            .into_iter()
            .map(|(k, v)| Ok((k, v.into_value()?)))
            .collect::<Result<Vec<_>>>()?;
        Ok(Params::Named(values))
    }
}

// Const arrays delegate to the `Vec` impls above.
impl<T: IntoValue, const N: usize> Sealed for [T; N] {}
impl<T: IntoValue, const N: usize> IntoParams for [T; N] {
    fn into_params(self) -> Result<Params> {
        self.into_iter().collect::<Vec<_>>().into_params()
    }
}

impl<T: IntoValue, const N: usize> Sealed for [(&str, T); N] {}
impl<T: IntoValue, const N: usize> IntoParams for [(&str, T); N] {
    fn into_params(self) -> Result<Params> {
        self.into_iter()
            // TODO: Pretty unfortunate that we need to allocate here when we know
            // the str is likely 'static. Maybe we should convert our param names
            // to be `Cow<'static, str>`?
            .map(|(k, v)| Ok((k.to_string(), v.into_value()?)))
            .collect::<Result<Vec<_>>>()?
            .into_params()
    }
}

// Borrowed arrays clone their elements before delegating.
impl<T: IntoValue + Clone, const N: usize> Sealed for &[T; N] {}
impl<T: IntoValue + Clone, const N: usize> IntoParams for &[T; N] {
    fn into_params(self) -> Result<Params> {
        self.iter().cloned().collect::<Vec<_>>().into_params()
    }
}
// NOTICE: heavily inspired by rusqlite

// Implements `IntoParams` for positional tuples up to arity 16;
// `$field` is the tuple index, `$ftype` the generic parameter.
macro_rules! tuple_into_params {
    ($count:literal : $(($field:tt $ftype:ident)),* $(,)?) => {
        impl<$($ftype,)*> Sealed for ($($ftype,)*) where $($ftype: IntoValue,)* {}
        impl<$($ftype,)*> IntoParams for ($($ftype,)*) where $($ftype: IntoValue,)* {
            fn into_params(self) -> Result<Params> {
                let params = Params::Positional(vec![$(self.$field.into_value()?),*]);
                Ok(params)
            }
        }
    }
}

// Implements `IntoParams` for tuples of `(&str, value)` pairs up to arity 16.
macro_rules! named_tuple_into_params {
    ($count:literal : $(($field:tt $ftype:ident)),* $(,)?) => {
        impl<$($ftype,)*> Sealed for ($((&str, $ftype),)*) where $($ftype: IntoValue,)* {}
        impl<$($ftype,)*> IntoParams for ($((&str, $ftype),)*) where $($ftype: IntoValue,)* {
            fn into_params(self) -> Result<Params> {
                let params = Params::Named(vec![$((self.$field.0.to_string(), self.$field.1.into_value()?)),*]);
                Ok(params)
            }
        }
    }
}

named_tuple_into_params!(2: (0 A), (1 B));
named_tuple_into_params!(3: (0 A), (1 B), (2 C));
named_tuple_into_params!(4: (0 A), (1 B), (2 C), (3 D));
named_tuple_into_params!(5: (0 A), (1 B), (2 C), (3 D), (4 E));
named_tuple_into_params!(6: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F));
named_tuple_into_params!(7: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G));
named_tuple_into_params!(8: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H));
named_tuple_into_params!(9: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I));
named_tuple_into_params!(10: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J));
named_tuple_into_params!(11: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K));
named_tuple_into_params!(12: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L));
named_tuple_into_params!(13: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M));
named_tuple_into_params!(14: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M), (13 N));
named_tuple_into_params!(15: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M), (13 N), (14 O));
named_tuple_into_params!(16: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M), (13 N), (14 O), (15 P));
tuple_into_params!(2: (0 A), (1 B));
tuple_into_params!(3: (0 A), (1 B), (2 C));
tuple_into_params!(4: (0 A), (1 B), (2 C), (3 D));
tuple_into_params!(5: (0 A), (1 B), (2 C), (3 D), (4 E));
tuple_into_params!(6: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F));
tuple_into_params!(7: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G));
tuple_into_params!(8: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H));
tuple_into_params!(9: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I));
tuple_into_params!(10: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J));
tuple_into_params!(11: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K));
tuple_into_params!(12: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L));
tuple_into_params!(13: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M));
tuple_into_params!(14: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M), (13 N));
tuple_into_params!(15: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M), (13 N), (14 O));
tuple_into_params!(16: (0 A), (1 B), (2 C), (3 D), (4 E), (5 F), (6 G), (7 H), (8 I), (9 J), (10 K), (11 L), (12 M), (13 N), (14 O), (15 P));
// TODO: Should we rename this to `ToSql` which makes less sense but
// matches the error variant we have in `Error`. Or should we change the
// error variant to match this breaking the few people that currently use
// this error variant.
/// Conversion of a single Rust value into a SQL [`Value`].
pub trait IntoValue {
    fn into_value(self) -> Result<Value>;
}

// Blanket impl: anything fallibly convertible into `Value` is `IntoValue`,
// with failures wrapped in `Error::ToSqlConversionFailure`.
impl<T> IntoValue for T
where
    T: TryInto<Value>,
    T::Error: Into<crate::BoxError>,
{
    fn into_value(self) -> Result<Value> {
        self.try_into()
            .map_err(|e| Error::ToSqlConversionFailure(e.into()))
    }
}

// Already-converted results pass through unchanged (used by the macros below).
impl IntoValue for Result<Value> {
    fn into_value(self) -> Result<Value> {
        self
    }
}
/// Construct positional params from a heterogeneous set of params types.
#[macro_export]
macro_rules! params {
    () => {
        ()
    };
    ($($value:expr),* $(,)?) => {{
        use $crate::params::IntoValue;
        [$($value.into_value()),*]
    }};
}

/// Construct named params from a heterogeneous set of params types.
#[macro_export]
macro_rules! named_params {
    () => {
        ()
    };
    ($($param_name:literal: $value:expr),* $(,)?) => {{
        use $crate::params::IntoValue;
        [$(($param_name, $value.into_value())),*]
    }};
}
#[cfg(test)]
mod tests {
    use crate::Value;

    // `params!` expands to an array of `Result<Value>`; a 16-element zero array
    // should serialize as a 16-byte blob.
    // NOTE(review): relies on a `TryInto<Value>` impl for `[_; 16]` — confirm it exists.
    #[test]
    fn test_serialize_array() {
        assert_eq!(
            params!([0; 16])[0].as_ref().unwrap(),
            &Value::Blob(vec![0; 16])
        );
    }
}

364
bindings/rust/src/value.rs Normal file
View File

@@ -0,0 +1,364 @@
use std::str::FromStr;
use crate::{Error, Result};
/// An owned SQL value, mirroring SQLite's five storage classes.
#[derive(Clone, Debug, PartialEq)]
pub enum Value {
    Null,
    Integer(i64),
    Real(f64),
    Text(String),
    Blob(Vec<u8>),
}
/// The possible types a column can be in libsql.
#[derive(Debug, Copy, Clone)]
pub enum ValueType {
    // Discriminants start at 1; NOTE(review): presumably intended to match
    // SQLite's fundamental datatype codes — confirm against callers.
    Integer = 1,
    Real,
    Text,
    Blob,
    Null,
}
impl FromStr for ValueType {
    type Err = ();

    /// Parses an upper-case SQL type name ("TEXT", "INTEGER", "BLOB", "NULL",
    /// "REAL") into a [`ValueType`]; any other string yields `Err(())`.
    fn from_str(s: &str) -> std::result::Result<ValueType, Self::Err> {
        let ty = match s {
            "NULL" => ValueType::Null,
            "INTEGER" => ValueType::Integer,
            "REAL" => ValueType::Real,
            "TEXT" => ValueType::Text,
            "BLOB" => ValueType::Blob,
            _ => return Err(()),
        };
        Ok(ty)
    }
}
impl Value {
    /// Returns `true` if the value is [`Null`].
    ///
    /// [`Null`]: Value::Null
    #[must_use]
    pub fn is_null(&self) -> bool {
        matches!(self, Self::Null)
    }

    /// Returns `true` if the value is [`Integer`].
    ///
    /// [`Integer`]: Value::Integer
    #[must_use]
    pub fn is_integer(&self) -> bool {
        matches!(self, Self::Integer(..))
    }

    /// Returns `true` if the value is [`Real`].
    ///
    /// [`Real`]: Value::Real
    #[must_use]
    pub fn is_real(&self) -> bool {
        matches!(self, Self::Real(..))
    }

    /// Returns a reference to the inner `f64` if the value is [`Value::Real`].
    pub fn as_real(&self) -> Option<&f64> {
        if let Self::Real(v) = self {
            Some(v)
        } else {
            None
        }
    }

    /// Returns `true` if the value is [`Text`].
    ///
    /// [`Text`]: Value::Text
    #[must_use]
    pub fn is_text(&self) -> bool {
        matches!(self, Self::Text(..))
    }

    /// Returns a reference to the inner `String` if the value is [`Value::Text`].
    pub fn as_text(&self) -> Option<&String> {
        if let Self::Text(v) = self {
            Some(v)
        } else {
            None
        }
    }

    /// Returns a reference to the inner `i64` if the value is [`Value::Integer`].
    pub fn as_integer(&self) -> Option<&i64> {
        if let Self::Integer(v) = self {
            Some(v)
        } else {
            None
        }
    }

    /// Returns `true` if the value is [`Blob`].
    ///
    /// [`Blob`]: Value::Blob
    #[must_use]
    pub fn is_blob(&self) -> bool {
        matches!(self, Self::Blob(..))
    }

    /// Returns a reference to the inner bytes if the value is [`Value::Blob`].
    pub fn as_blob(&self) -> Option<&Vec<u8>> {
        if let Self::Blob(v) = self {
            Some(v)
        } else {
            None
        }
    }
}
impl From<i8> for Value {
fn from(value: i8) -> Value {
Value::Integer(value as i64)
}
}
impl From<i16> for Value {
fn from(value: i16) -> Value {
Value::Integer(value as i64)
}
}
impl From<i32> for Value {
fn from(value: i32) -> Value {
Value::Integer(value as i64)
}
}
impl From<i64> for Value {
fn from(value: i64) -> Value {
Value::Integer(value)
}
}
impl From<u8> for Value {
fn from(value: u8) -> Value {
Value::Integer(value as i64)
}
}
impl From<u16> for Value {
fn from(value: u16) -> Value {
Value::Integer(value as i64)
}
}
impl From<u32> for Value {
fn from(value: u32) -> Value {
Value::Integer(value as i64)
}
}
impl TryFrom<u64> for Value {
type Error = crate::Error;
fn try_from(value: u64) -> Result<Value> {
if value > i64::MAX as u64 {
Err(Error::ToSqlConversionFailure(
"u64 is too large to fit in an i64".into(),
))
} else {
Ok(Value::Integer(value as i64))
}
}
}
impl From<f32> for Value {
    // Widening float conversion is lossless; `f64::from` makes that explicit.
    fn from(value: f32) -> Value {
        Value::Real(f64::from(value))
    }
}
impl From<f64> for Value {
    fn from(value: f64) -> Value {
        Value::Real(value)
    }
}
impl From<&str> for Value {
    // Copies the borrowed string into an owned text value.
    fn from(value: &str) -> Value {
        Value::Text(String::from(value))
    }
}
impl From<String> for Value {
    fn from(value: String) -> Value {
        Value::Text(value)
    }
}
impl From<&[u8]> for Value {
    // Copies the borrowed bytes into an owned blob.
    fn from(value: &[u8]) -> Value {
        Value::Blob(value.to_vec())
    }
}
impl From<Vec<u8>> for Value {
    fn from(value: Vec<u8>) -> Value {
        Value::Blob(value)
    }
}
impl From<bool> for Value {
    // SQLite has no boolean type: `true` maps to 1, `false` to 0.
    fn from(value: bool) -> Value {
        Value::Integer(i64::from(value))
    }
}
impl<T> From<Option<T>> for Value
where
    T: Into<Value>,
{
    /// `None` maps to [`Value::Null`]; `Some(x)` defers to `x.into()`.
    fn from(value: Option<T>) -> Self {
        value.map_or(Value::Null, Into::into)
    }
}
/// A borrowed version of `Value`.
#[derive(Debug)]
pub enum ValueRef<'a> {
    /// The SQL NULL value.
    Null,
    /// A 64-bit signed integer.
    Integer(i64),
    /// A 64-bit floating-point value.
    Real(f64),
    /// Borrowed text stored as raw bytes. UTF-8 validity is not enforced
    /// here; converting to an owned `Value::Text` is lossy (`from_utf8_lossy`).
    Text(&'a [u8]),
    /// A borrowed byte blob.
    Blob(&'a [u8]),
}
impl ValueRef<'_> {
    /// Returns the [`ValueType`] corresponding to this reference's variant.
    pub fn data_type(&self) -> ValueType {
        match self {
            ValueRef::Null => ValueType::Null,
            ValueRef::Integer(_) => ValueType::Integer,
            ValueRef::Real(_) => ValueType::Real,
            ValueRef::Text(_) => ValueType::Text,
            ValueRef::Blob(_) => ValueType::Blob,
        }
    }
    /// Returns `true` if the value ref is [`Null`].
    ///
    /// [`Null`]: ValueRef::Null
    #[must_use]
    pub fn is_null(&self) -> bool {
        matches!(self, Self::Null)
    }
    /// Returns `true` if the value ref is [`Integer`].
    ///
    /// [`Integer`]: ValueRef::Integer
    #[must_use]
    pub fn is_integer(&self) -> bool {
        matches!(self, Self::Integer(..))
    }
    /// Returns the inner integer if this is an [`Integer`] ref, `None` otherwise.
    ///
    /// [`Integer`]: ValueRef::Integer
    pub fn as_integer(&self) -> Option<&i64> {
        match self {
            Self::Integer(v) => Some(v),
            _ => None,
        }
    }
    /// Returns `true` if the value ref is [`Real`].
    ///
    /// [`Real`]: ValueRef::Real
    #[must_use]
    pub fn is_real(&self) -> bool {
        matches!(self, Self::Real(..))
    }
    /// Returns the inner float if this is a [`Real`] ref, `None` otherwise.
    ///
    /// [`Real`]: ValueRef::Real
    pub fn as_real(&self) -> Option<&f64> {
        match self {
            Self::Real(v) => Some(v),
            _ => None,
        }
    }
    /// Returns `true` if the value ref is [`Text`].
    ///
    /// [`Text`]: ValueRef::Text
    #[must_use]
    pub fn is_text(&self) -> bool {
        matches!(self, Self::Text(..))
    }
    /// Returns the borrowed text bytes if this is a [`Text`] ref, `None` otherwise.
    ///
    /// [`Text`]: ValueRef::Text
    pub fn as_text(&self) -> Option<&[u8]> {
        match self {
            Self::Text(v) => Some(v),
            _ => None,
        }
    }
    /// Returns `true` if the value ref is [`Blob`].
    ///
    /// [`Blob`]: ValueRef::Blob
    #[must_use]
    pub fn is_blob(&self) -> bool {
        matches!(self, Self::Blob(..))
    }
    /// Returns the borrowed blob bytes if this is a [`Blob`] ref, `None` otherwise.
    ///
    /// [`Blob`]: ValueRef::Blob
    pub fn as_blob(&self) -> Option<&[u8]> {
        match self {
            Self::Blob(v) => Some(v),
            _ => None,
        }
    }
}
impl From<ValueRef<'_>> for Value {
    /// Converts a borrowed value into an owned [`Value`], copying any
    /// text/blob payload. Text bytes that are not valid UTF-8 are replaced
    /// lossily (U+FFFD) rather than producing an error.
    fn from(vr: ValueRef<'_>) -> Value {
        match vr {
            ValueRef::Null => Value::Null,
            ValueRef::Integer(i) => Value::Integer(i),
            ValueRef::Real(r) => Value::Real(r),
            // `into_owned` extracts the Cow's owned form directly instead of
            // re-formatting it through `Display` as `.to_string()` would.
            ValueRef::Text(s) => Value::Text(String::from_utf8_lossy(s).into_owned()),
            ValueRef::Blob(b) => Value::Blob(b.to_vec()),
        }
    }
}
impl<'a> From<&'a str> for ValueRef<'a> {
    /// Borrows the string's UTF-8 bytes as a text value; no allocation.
    fn from(s: &str) -> ValueRef<'_> {
        ValueRef::Text(s.as_bytes())
    }
}
impl<'a> From<&'a [u8]> for ValueRef<'a> {
    /// Borrows the byte slice as a blob value; no allocation.
    fn from(s: &[u8]) -> ValueRef<'_> {
        ValueRef::Blob(s)
    }
}
impl<'a> From<&'a Value> for ValueRef<'a> {
    /// Produces a cheap borrowed view over an owned [`Value`]; no data is copied.
    fn from(v: &'a Value) -> ValueRef<'a> {
        match v {
            Value::Null => ValueRef::Null,
            Value::Integer(i) => ValueRef::Integer(*i),
            Value::Real(r) => ValueRef::Real(*r),
            Value::Text(s) => ValueRef::Text(s.as_bytes()),
            Value::Blob(b) => ValueRef::Blob(b),
        }
    }
}
impl<'a, T> From<Option<T>> for ValueRef<'a>
where
    T: Into<ValueRef<'a>>,
{
    /// `None` becomes [`ValueRef::Null`]; `Some(x)` defers to `x.into()`.
    #[inline]
    fn from(s: Option<T>) -> ValueRef<'a> {
        s.map_or(ValueRef::Null, Into::into)
    }
}

View File

@@ -1,12 +1,12 @@
{
"name": "limbo-wasm",
"version": "0.0.11",
"version": "0.0.12",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "limbo-wasm",
"version": "0.0.11",
"version": "0.0.12",
"license": "MIT",
"devDependencies": {
"@playwright/test": "^1.49.1",

View File

@@ -3,7 +3,7 @@
"collaborators": [
"the Limbo authors"
],
"version": "0.0.11",
"version": "0.0.12",
"license": "MIT",
"repository": {
"type": "git",

View File

@@ -3,7 +3,7 @@ use crate::{
opcodes_dictionary::OPCODE_DESCRIPTIONS,
};
use cli_table::{Cell, Table};
use limbo_core::{Database, LimboError, StepResult, Value};
use limbo_core::{Database, LimboError, Rows, StepResult, Value};
use clap::{Parser, ValueEnum};
use std::{
@@ -129,6 +129,8 @@ pub enum Command {
Tables,
/// Import data from FILE into TABLE
Import,
/// Loads an extension library
LoadExtension,
}
impl Command {
@@ -141,7 +143,12 @@ impl Command {
| Self::ShowInfo
| Self::Tables
| Self::SetOutput => 0,
Self::Open | Self::OutputMode | Self::Cwd | Self::Echo | Self::NullValue => 1,
Self::Open
| Self::OutputMode
| Self::Cwd
| Self::Echo
| Self::NullValue
| Self::LoadExtension => 1,
Self::Import => 2,
} + 1) // argv0
}
@@ -160,6 +167,7 @@ impl Command {
Self::NullValue => ".nullvalue <string>",
Self::Echo => ".echo on|off",
Self::Tables => ".tables",
Self::LoadExtension => ".load",
Self::Import => &IMPORT_HELP,
}
}
@@ -182,6 +190,7 @@ impl FromStr for Command {
".nullvalue" => Ok(Self::NullValue),
".echo" => Ok(Self::Echo),
".import" => Ok(Self::Import),
".load" => Ok(Self::LoadExtension),
_ => Err("Unknown command".to_string()),
}
}
@@ -295,8 +304,14 @@ impl Limbo {
fn handle_first_input(&mut self, cmd: &str) {
if cmd.trim().starts_with('.') {
self.handle_dot_command(cmd);
} else if let Err(e) = self.query(cmd) {
eprintln!("{}", e);
} else {
let conn = self.conn.clone();
let runner = conn.query_runner(cmd.as_bytes());
for output in runner {
if let Err(e) = self.print_query_result(cmd, output) {
let _ = self.writeln(e.to_string());
}
}
}
std::process::exit(0);
}
@@ -314,6 +329,14 @@ impl Limbo {
};
}
#[cfg(not(target_family = "wasm"))]
fn handle_load_extension(&mut self, path: &str) -> Result<(), String> {
let ext_path = limbo_core::resolve_ext_path(path).map_err(|e| e.to_string())?;
self.conn
.load_extension(ext_path)
.map_err(|e| e.to_string())
}
fn display_in_memory(&mut self) -> std::io::Result<()> {
if self.opts.db_file == ":memory:" {
self.writeln("Connected to a transient in-memory database.")?;
@@ -426,17 +449,16 @@ impl Limbo {
self.buffer_input(line);
let buff = self.input_buff.clone();
let echo = self.opts.echo;
buff.split(';')
.map(str::trim)
.filter(|s| !s.is_empty())
.for_each(|stmt| {
if echo {
let _ = self.writeln(stmt);
}
if let Err(e) = self.query(stmt) {
let _ = self.writeln(e.to_string());
}
});
if echo {
let _ = self.writeln(&buff);
}
let conn = self.conn.clone();
let runner = conn.query_runner(buff.as_bytes());
for output in runner {
if let Err(e) = self.print_query_result(&buff, output) {
let _ = self.writeln(e.to_string());
}
}
self.reset_input();
} else {
self.buffer_input(line);
@@ -537,6 +559,13 @@ impl Limbo {
let _ = self.writeln(e.to_string());
};
}
Command::LoadExtension =>
{
#[cfg(not(target_family = "wasm"))]
if let Err(e) = self.handle_load_extension(args[1]) {
let _ = self.writeln(&e);
}
}
}
} else {
let _ = self.write_fmt(format_args!(
@@ -546,8 +575,12 @@ impl Limbo {
}
}
pub fn query(&mut self, sql: &str) -> anyhow::Result<()> {
match self.conn.query(sql) {
fn print_query_result(
&mut self,
sql: &str,
mut output: Result<Option<Rows>, LimboError>,
) -> anyhow::Result<()> {
match output {
Ok(Some(ref mut rows)) => match self.opts.output_mode {
OutputMode::Raw => loop {
if self.interrupt_count.load(Ordering::SeqCst) > 0 {

View File

@@ -22,7 +22,7 @@ json = [
"dep:pest_derive",
]
uuid = ["dep:uuid"]
io_uring = ["dep:io-uring"]
io_uring = ["dep:io-uring", "rustix/io_uring"]
[target.'cfg(target_os = "linux")'.dependencies]
io-uring = { version = "0.6.1", optional = true }
@@ -35,12 +35,12 @@ rustix = "0.38.34"
mimalloc = { version = "*", default-features = false }
[dependencies]
limbo_extension = { path = "../limbo_extension" }
cfg_block = "0.1.1"
fallible-iterator = "0.3.0"
hex = "0.4.3"
libc = "0.2.155"
log = "0.4.20"
nix = { version = "0.29.0", features = ["fs"] }
sieve-cache = "0.1.4"
sqlite3-parser = { path = "../vendored/sqlite3-parser" }
thiserror = "1.0.61"
@@ -59,6 +59,7 @@ bumpalo = { version = "3.16.0", features = ["collections", "boxed"] }
limbo_macros = { path = "../macros" }
uuid = { version = "1.11.0", features = ["v4", "v7"], optional = true }
miette = "7.4.0"
libloading = "0.8.6"
[target.'cfg(not(target_family = "windows"))'.dev-dependencies]
pprof = { version = "0.14.0", features = ["criterion", "flamegraph"] }

View File

@@ -1,3 +1,5 @@
use std::num::NonZero;
use thiserror::Error;
#[derive(Debug, Error, miette::Diagnostic)]
@@ -39,6 +41,10 @@ pub enum LimboError {
InvalidModifier(String),
#[error("Runtime error: {0}")]
Constraint(String),
#[error("Extension error: {0}")]
ExtensionError(String),
#[error("Unbound parameter at index {0}")]
Unbound(NonZero<usize>),
}
#[macro_export]

View File

@@ -1,37 +1,44 @@
#[cfg(feature = "uuid")]
mod uuid;
#[cfg(feature = "uuid")]
pub use uuid::{exec_ts_from_uuid7, exec_uuid, exec_uuidblob, exec_uuidstr, UuidFunc};
use crate::{function::ExternalFunc, Database};
use limbo_extension::{ExtensionApi, ResultCode, ScalarFunction, RESULT_ERROR, RESULT_OK};
pub use limbo_extension::{Value as ExtValue, ValueType as ExtValueType};
use std::{
ffi::{c_char, c_void, CStr},
rc::Rc,
};
#[derive(Debug, Clone, PartialEq)]
pub enum ExtFunc {
#[cfg(feature = "uuid")]
Uuid(UuidFunc),
extern "C" fn register_scalar_function(
ctx: *mut c_void,
name: *const c_char,
func: ScalarFunction,
) -> ResultCode {
let c_str = unsafe { CStr::from_ptr(name) };
let name_str = match c_str.to_str() {
Ok(s) => s.to_string(),
Err(_) => return RESULT_ERROR,
};
if ctx.is_null() {
return RESULT_ERROR;
}
let db = unsafe { &*(ctx as *const Database) };
db.register_scalar_function_impl(name_str, func)
}
#[allow(unreachable_patterns)] // TODO: remove when more extension funcs added
impl std::fmt::Display for ExtFunc {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
#[cfg(feature = "uuid")]
Self::Uuid(uuidfn) => write!(f, "{}", uuidfn),
_ => write!(f, "unknown"),
impl Database {
fn register_scalar_function_impl(&self, name: String, func: ScalarFunction) -> ResultCode {
self.syms.borrow_mut().functions.insert(
name.to_string(),
Rc::new(ExternalFunc {
name: name.to_string(),
func,
}),
);
RESULT_OK
}
pub fn build_limbo_extension(&self) -> ExtensionApi {
ExtensionApi {
ctx: self as *const _ as *mut c_void,
register_scalar_function,
}
}
}
#[allow(unreachable_patterns)]
impl ExtFunc {
pub fn resolve_function(name: &str, num_args: usize) -> Option<ExtFunc> {
match name {
#[cfg(feature = "uuid")]
name => UuidFunc::resolve_function(name, num_args),
_ => None,
}
}
}
pub fn init(db: &mut crate::Database) {
#[cfg(feature = "uuid")]
uuid::init(db);
}

View File

@@ -1,343 +0,0 @@
use super::ExtFunc;
use crate::{
types::{LimboText, OwnedValue},
Database, LimboError,
};
use std::rc::Rc;
use uuid::{ContextV7, Timestamp, Uuid};
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum UuidFunc {
Uuid4Str,
Uuid7,
Uuid7TS,
UuidStr,
UuidBlob,
}
impl UuidFunc {
pub fn resolve_function(name: &str, num_args: usize) -> Option<ExtFunc> {
match name {
"uuid4_str" => Some(ExtFunc::Uuid(Self::Uuid4Str)),
"uuid7" if num_args < 2 => Some(ExtFunc::Uuid(Self::Uuid7)),
"uuid_str" if num_args == 1 => Some(ExtFunc::Uuid(Self::UuidStr)),
"uuid_blob" if num_args == 1 => Some(ExtFunc::Uuid(Self::UuidBlob)),
"uuid7_timestamp_ms" if num_args == 1 => Some(ExtFunc::Uuid(Self::Uuid7TS)),
// postgres_compatability
"gen_random_uuid" => Some(ExtFunc::Uuid(Self::Uuid4Str)),
_ => None,
}
}
}
impl std::fmt::Display for UuidFunc {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Uuid4Str => write!(f, "uuid4_str"),
Self::Uuid7 => write!(f, "uuid7"),
Self::Uuid7TS => write!(f, "uuid7_timestamp_ms"),
Self::UuidStr => write!(f, "uuid_str"),
Self::UuidBlob => write!(f, "uuid_blob"),
}
}
}
pub fn exec_uuid(var: &UuidFunc, sec: Option<&OwnedValue>) -> crate::Result<OwnedValue> {
match var {
UuidFunc::Uuid4Str => Ok(OwnedValue::Text(LimboText::new(Rc::new(
Uuid::new_v4().to_string(),
)))),
UuidFunc::Uuid7 => {
let uuid = match sec {
Some(OwnedValue::Integer(ref seconds)) => {
let ctx = ContextV7::new();
if *seconds < 0 {
// not valid unix timestamp, error or null?
return Ok(OwnedValue::Null);
}
Uuid::new_v7(Timestamp::from_unix(ctx, *seconds as u64, 0))
}
_ => Uuid::now_v7(),
};
Ok(OwnedValue::Blob(Rc::new(uuid.into_bytes().to_vec())))
}
_ => unreachable!(),
}
}
pub fn exec_uuid4() -> crate::Result<OwnedValue> {
Ok(OwnedValue::Blob(Rc::new(
Uuid::new_v4().into_bytes().to_vec(),
)))
}
pub fn exec_uuidstr(reg: &OwnedValue) -> crate::Result<OwnedValue> {
match reg {
OwnedValue::Blob(blob) => {
let uuid = Uuid::from_slice(blob).map_err(|e| LimboError::ParseError(e.to_string()))?;
Ok(OwnedValue::Text(LimboText::new(Rc::new(uuid.to_string()))))
}
OwnedValue::Text(ref val) => {
let uuid =
Uuid::parse_str(&val.value).map_err(|e| LimboError::ParseError(e.to_string()))?;
Ok(OwnedValue::Text(LimboText::new(Rc::new(uuid.to_string()))))
}
OwnedValue::Null => Ok(OwnedValue::Null),
_ => Err(LimboError::ParseError(
"Invalid argument type for UUID function".to_string(),
)),
}
}
pub fn exec_uuidblob(reg: &OwnedValue) -> crate::Result<OwnedValue> {
match reg {
OwnedValue::Text(val) => {
let uuid =
Uuid::parse_str(&val.value).map_err(|e| LimboError::ParseError(e.to_string()))?;
Ok(OwnedValue::Blob(Rc::new(uuid.as_bytes().to_vec())))
}
OwnedValue::Blob(blob) => {
let uuid = Uuid::from_slice(blob).map_err(|e| LimboError::ParseError(e.to_string()))?;
Ok(OwnedValue::Blob(Rc::new(uuid.as_bytes().to_vec())))
}
OwnedValue::Null => Ok(OwnedValue::Null),
_ => Err(LimboError::ParseError(
"Invalid argument type for UUID function".to_string(),
)),
}
}
pub fn exec_ts_from_uuid7(reg: &OwnedValue) -> OwnedValue {
let uuid = match reg {
OwnedValue::Blob(blob) => {
Uuid::from_slice(blob).map_err(|e| LimboError::ParseError(e.to_string()))
}
OwnedValue::Text(val) => {
Uuid::parse_str(&val.value).map_err(|e| LimboError::ParseError(e.to_string()))
}
_ => Err(LimboError::ParseError(
"Invalid argument type for UUID function".to_string(),
)),
};
match uuid {
Ok(uuid) => OwnedValue::Integer(uuid_to_unix(uuid.as_bytes()) as i64),
// display error? sqlean seems to set value to null
Err(_) => OwnedValue::Null,
}
}
#[inline(always)]
fn uuid_to_unix(uuid: &[u8; 16]) -> u64 {
((uuid[0] as u64) << 40)
| ((uuid[1] as u64) << 32)
| ((uuid[2] as u64) << 24)
| ((uuid[3] as u64) << 16)
| ((uuid[4] as u64) << 8)
| (uuid[5] as u64)
}
pub fn init(db: &mut Database) {
db.define_scalar_function("uuid4", |_args| exec_uuid4());
}
#[cfg(test)]
#[cfg(feature = "uuid")]
pub mod test {
use super::UuidFunc;
use crate::types::OwnedValue;
#[test]
fn test_exec_uuid_v4blob() {
use super::exec_uuid4;
use uuid::Uuid;
let owned_val = exec_uuid4();
match owned_val {
Ok(OwnedValue::Blob(blob)) => {
assert_eq!(blob.len(), 16);
let uuid = Uuid::from_slice(&blob);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 4);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
#[test]
fn test_exec_uuid_v4str() {
use super::{exec_uuid, UuidFunc};
use uuid::Uuid;
let func = UuidFunc::Uuid4Str;
let owned_val = exec_uuid(&func, None);
match owned_val {
Ok(OwnedValue::Text(v4str)) => {
assert_eq!(v4str.value.len(), 36);
let uuid = Uuid::parse_str(&v4str.value);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 4);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
#[test]
fn test_exec_uuid_v7_now() {
use super::{exec_uuid, UuidFunc};
use uuid::Uuid;
let func = UuidFunc::Uuid7;
let owned_val = exec_uuid(&func, None);
match owned_val {
Ok(OwnedValue::Blob(blob)) => {
assert_eq!(blob.len(), 16);
let uuid = Uuid::from_slice(&blob);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 7);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
#[test]
fn test_exec_uuid_v7_with_input() {
use super::{exec_uuid, UuidFunc};
use uuid::Uuid;
let func = UuidFunc::Uuid7;
let owned_val = exec_uuid(&func, Some(&OwnedValue::Integer(946702800)));
match owned_val {
Ok(OwnedValue::Blob(blob)) => {
assert_eq!(blob.len(), 16);
let uuid = Uuid::from_slice(&blob);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 7);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
#[test]
fn test_exec_uuid_v7_now_to_timestamp() {
use super::{exec_ts_from_uuid7, exec_uuid, UuidFunc};
use uuid::Uuid;
let func = UuidFunc::Uuid7;
let owned_val = exec_uuid(&func, None);
match owned_val {
Ok(OwnedValue::Blob(ref blob)) => {
assert_eq!(blob.len(), 16);
let uuid = Uuid::from_slice(blob);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 7);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
let result = exec_ts_from_uuid7(&owned_val.expect("uuid7"));
if let OwnedValue::Integer(ref ts) = result {
let unixnow = (std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs()
* 1000) as i64;
assert!(*ts >= unixnow - 1000);
}
}
#[test]
fn test_exec_uuid_v7_to_timestamp() {
use super::{exec_ts_from_uuid7, exec_uuid, UuidFunc};
use uuid::Uuid;
let func = UuidFunc::Uuid7;
let owned_val = exec_uuid(&func, Some(&OwnedValue::Integer(946702800)));
match owned_val {
Ok(OwnedValue::Blob(ref blob)) => {
assert_eq!(blob.len(), 16);
let uuid = Uuid::from_slice(blob);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 7);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
let result = exec_ts_from_uuid7(&owned_val.expect("uuid7"));
assert_eq!(result, OwnedValue::Integer(946702800 * 1000));
if let OwnedValue::Integer(ts) = result {
let time = chrono::DateTime::from_timestamp(ts / 1000, 0);
assert_eq!(
time.unwrap(),
"2000-01-01T05:00:00Z"
.parse::<chrono::DateTime<chrono::Utc>>()
.unwrap()
);
}
}
#[test]
fn test_exec_uuid_v4_str_to_blob() {
use super::{exec_uuid, exec_uuidblob, UuidFunc};
use uuid::Uuid;
let owned_val = exec_uuidblob(
&exec_uuid(&UuidFunc::Uuid4Str, None).expect("uuid v4 string to generate"),
);
match owned_val {
Ok(OwnedValue::Blob(blob)) => {
assert_eq!(blob.len(), 16);
let uuid = Uuid::from_slice(&blob);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 4);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
#[test]
fn test_exec_uuid_v7_str_to_blob() {
use super::{exec_uuid, exec_uuidblob, exec_uuidstr, UuidFunc};
use uuid::Uuid;
// convert a v7 blob to a string then back to a blob
let owned_val = exec_uuidblob(
&exec_uuidstr(&exec_uuid(&UuidFunc::Uuid7, None).expect("uuid v7 blob to generate"))
.expect("uuid v7 string to generate"),
);
match owned_val {
Ok(OwnedValue::Blob(blob)) => {
assert_eq!(blob.len(), 16);
let uuid = Uuid::from_slice(&blob);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 7);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
#[test]
fn test_exec_uuid_v4_blob_to_str() {
use super::{exec_uuid4, exec_uuidstr};
use uuid::Uuid;
// convert a v4 blob to a string
let owned_val = exec_uuidstr(&exec_uuid4().expect("uuid v7 blob to generate"));
match owned_val {
Ok(OwnedValue::Text(v4str)) => {
assert_eq!(v4str.value.len(), 36);
let uuid = Uuid::parse_str(&v4str.value);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 4);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
#[test]
fn test_exec_uuid_v7_blob_to_str() {
use super::{exec_uuid, exec_uuidstr};
use uuid::Uuid;
// convert a v7 blob to a string
let owned_val = exec_uuidstr(
&exec_uuid(&UuidFunc::Uuid7, Some(&OwnedValue::Integer(123456789)))
.expect("uuid v7 blob to generate"),
);
match owned_val {
Ok(OwnedValue::Text(v7str)) => {
assert_eq!(v7str.value.len(), 36);
let uuid = Uuid::parse_str(&v7str.value);
assert!(uuid.is_ok());
assert_eq!(uuid.unwrap().get_version_num(), 7);
}
_ => panic!("exec_uuid did not return a Blob variant"),
}
}
}

View File

@@ -1,11 +1,21 @@
use crate::ext::ExtFunc;
use std::fmt;
use std::fmt::{Debug, Display};
use std::rc::Rc;
use limbo_extension::ScalarFunction;
pub struct ExternalFunc {
pub name: String,
pub func: Box<dyn Fn(&[crate::types::Value]) -> crate::Result<crate::types::OwnedValue>>,
pub func: ScalarFunction,
}
impl ExternalFunc {
pub fn new(name: &str, func: ScalarFunction) -> Self {
Self {
name: name.to_string(),
func,
}
}
}
impl Debug for ExternalFunc {
@@ -128,6 +138,8 @@ pub enum ScalarFunc {
ZeroBlob,
LastInsertRowid,
Replace,
#[cfg(not(target_family = "wasm"))]
LoadExtension,
}
impl Display for ScalarFunc {
@@ -177,6 +189,8 @@ impl Display for ScalarFunc {
Self::LastInsertRowid => "last_insert_rowid".to_string(),
Self::Replace => "replace".to_string(),
Self::DateTime => "datetime".to_string(),
#[cfg(not(target_family = "wasm"))]
Self::LoadExtension => "load_extension".to_string(),
};
write!(f, "{}", str)
}
@@ -301,7 +315,6 @@ pub enum Func {
Math(MathFunc),
#[cfg(feature = "json")]
Json(JsonFunc),
Extension(ExtFunc),
External(Rc<ExternalFunc>),
}
@@ -313,7 +326,6 @@ impl Display for Func {
Self::Math(math_func) => write!(f, "{}", math_func),
#[cfg(feature = "json")]
Self::Json(json_func) => write!(f, "{}", json_func),
Self::Extension(ext_func) => write!(f, "{}", ext_func),
Self::External(generic_func) => write!(f, "{}", generic_func),
}
}
@@ -422,10 +434,9 @@ impl Func {
"tan" => Ok(Self::Math(MathFunc::Tan)),
"tanh" => Ok(Self::Math(MathFunc::Tanh)),
"trunc" => Ok(Self::Math(MathFunc::Trunc)),
_ => match ExtFunc::resolve_function(name, arg_count) {
Some(ext_func) => Ok(Self::Extension(ext_func)),
None => Err(()),
},
#[cfg(not(target_family = "wasm"))]
"load_extension" => Ok(Self::Scalar(ScalarFunc::LoadExtension)),
_ => Err(()),
}
}
}

View File

@@ -1,16 +1,18 @@
use super::{common, Completion, File, OpenFlags, IO};
use crate::{LimboError, Result};
use libc::{c_short, fcntl, flock, iovec, F_SETLK};
use log::{debug, trace};
use nix::fcntl::{FcntlArg, OFlag};
use rustix::fs::{self, FlockOperation, OFlags};
use rustix::io_uring::iovec;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use std::io::ErrorKind;
use std::os::fd::AsFd;
use std::os::unix::io::AsRawFd;
use std::rc::Rc;
use thiserror::Error;
const MAX_IOVECS: usize = 128;
const MAX_IOVECS: u32 = 128;
const SQPOLL_IDLE: u32 = 1000;
#[derive(Debug, Error)]
@@ -44,7 +46,7 @@ struct WrappedIOUring {
struct InnerUringIO {
ring: WrappedIOUring,
iovecs: [iovec; MAX_IOVECS],
iovecs: [iovec; MAX_IOVECS as usize],
next_iovec: usize,
}
@@ -52,10 +54,10 @@ impl UringIO {
pub fn new() -> Result<Self> {
let ring = match io_uring::IoUring::builder()
.setup_sqpoll(SQPOLL_IDLE)
.build(MAX_IOVECS as u32)
.build(MAX_IOVECS)
{
Ok(ring) => ring,
Err(_) => io_uring::IoUring::new(MAX_IOVECS as u32)?,
Err(_) => io_uring::IoUring::new(MAX_IOVECS)?,
};
let inner = InnerUringIO {
ring: WrappedIOUring {
@@ -67,7 +69,7 @@ impl UringIO {
iovecs: [iovec {
iov_base: std::ptr::null_mut(),
iov_len: 0,
}; MAX_IOVECS],
}; MAX_IOVECS as usize],
next_iovec: 0,
};
debug!("Using IO backend 'io-uring'");
@@ -82,14 +84,14 @@ impl InnerUringIO {
let iovec = &mut self.iovecs[self.next_iovec];
iovec.iov_base = buf as *mut std::ffi::c_void;
iovec.iov_len = len;
self.next_iovec = (self.next_iovec + 1) % MAX_IOVECS;
self.next_iovec = (self.next_iovec + 1) % MAX_IOVECS as usize;
iovec
}
}
impl WrappedIOUring {
fn submit_entry(&mut self, entry: &io_uring::squeue::Entry, c: Rc<Completion>) {
log::trace!("submit_entry({:?})", entry);
trace!("submit_entry({:?})", entry);
self.pending.insert(entry.get_user_data(), c);
unsafe {
self.ring
@@ -109,7 +111,7 @@ impl WrappedIOUring {
// NOTE: This works because CompletionQueue's next function pops the head of the queue. This is not normal behaviour of iterators
let entry = self.ring.completion().next();
if entry.is_some() {
log::trace!("get_completion({:?})", entry);
trace!("get_completion({:?})", entry);
// consumed an entry from completion queue, update pending_ops
self.pending_ops -= 1;
}
@@ -136,12 +138,12 @@ impl IO for UringIO {
.open(path)?;
// Let's attempt to enable direct I/O. Not all filesystems support it
// so ignore any errors.
let fd = file.as_raw_fd();
let fd = file.as_fd();
if direct {
match nix::fcntl::fcntl(fd, FcntlArg::F_SETFL(OFlag::O_DIRECT)) {
Ok(_) => {},
match fs::fcntl_setfl(fd, OFlags::DIRECT) {
Ok(_) => {}
Err(error) => debug!("Error {error:?} returned when setting O_DIRECT flag to read file. The performance of the system may be affected"),
};
}
}
let uring_file = Rc::new(UringFile {
io: self.inner.clone(),
@@ -199,52 +201,39 @@ pub struct UringFile {
impl File for UringFile {
fn lock_file(&self, exclusive: bool) -> Result<()> {
let fd = self.file.as_raw_fd();
let flock = flock {
l_type: if exclusive {
libc::F_WRLCK as c_short
} else {
libc::F_RDLCK as c_short
},
l_whence: libc::SEEK_SET as c_short,
l_start: 0,
l_len: 0, // Lock entire file
l_pid: 0,
};
let fd = self.file.as_fd();
// F_SETLK is a non-blocking lock. The lock will be released when the file is closed
// or the process exits or after an explicit unlock.
let lock_result = unsafe { fcntl(fd, F_SETLK, &flock) };
if lock_result == -1 {
let err = std::io::Error::last_os_error();
if err.kind() == std::io::ErrorKind::WouldBlock {
return Err(LimboError::LockingError(
"File is locked by another process".into(),
));
fs::fcntl_lock(
fd,
if exclusive {
FlockOperation::NonBlockingLockExclusive
} else {
return Err(LimboError::IOError(err));
}
}
FlockOperation::NonBlockingLockShared
},
)
.map_err(|e| {
let io_error = std::io::Error::from(e);
let message = match io_error.kind() {
ErrorKind::WouldBlock => {
"Failed locking file. File is locked by another process".to_string()
}
_ => format!("Failed locking file, {}", io_error),
};
LimboError::LockingError(message)
})?;
Ok(())
}
fn unlock_file(&self) -> Result<()> {
let fd = self.file.as_raw_fd();
let flock = flock {
l_type: libc::F_UNLCK as c_short,
l_whence: libc::SEEK_SET as c_short,
l_start: 0,
l_len: 0,
l_pid: 0,
};
let unlock_result = unsafe { fcntl(fd, F_SETLK, &flock) };
if unlock_result == -1 {
return Err(LimboError::LockingError(format!(
let fd = self.file.as_fd();
fs::fcntl_lock(fd, FlockOperation::NonBlockingUnlock).map_err(|e| {
LimboError::LockingError(format!(
"Failed to release file lock: {}",
std::io::Error::last_os_error()
)));
}
std::io::Error::from(e)
))
})?;
Ok(())
}
@@ -261,7 +250,7 @@ impl File for UringFile {
let len = buf.len();
let buf = buf.as_mut_ptr();
let iovec = io.get_iovec(buf, len);
io_uring::opcode::Readv::new(fd, iovec, 1)
io_uring::opcode::Readv::new(fd, iovec as *const iovec as *const libc::iovec, 1)
.offset(pos as u64)
.build()
.user_data(io.ring.get_key())
@@ -282,7 +271,7 @@ impl File for UringFile {
let buf = buffer.borrow();
trace!("pwrite(pos = {}, length = {})", pos, buf.len());
let iovec = io.get_iovec(buf.as_ptr(), buf.len());
io_uring::opcode::Writev::new(fd, iovec, 1)
io_uring::opcode::Writev::new(fd, iovec as *const iovec as *const libc::iovec, 1)
.offset(pos as u64)
.build()
.user_data(io.ring.get_key())
@@ -303,7 +292,7 @@ impl File for UringFile {
}
fn size(&self) -> Result<u64> {
Ok(self.file.metadata().unwrap().len())
Ok(self.file.metadata()?.len())
}
}

View File

@@ -3,15 +3,16 @@ use crate::io::common;
use crate::Result;
use super::{Completion, File, OpenFlags, IO};
use libc::{c_short, fcntl, flock, F_SETLK};
use log::{debug, trace};
use polling::{Event, Events, Poller};
use rustix::fd::{AsFd, AsRawFd};
use rustix::fs::OpenOptionsExt;
use rustix::io::Errno;
use rustix::{
fd::{AsFd, AsRawFd},
fs::{self, FlockOperation, OFlags, OpenOptionsExt},
io::Errno,
};
use std::cell::RefCell;
use std::collections::HashMap;
use std::io::{Read, Seek, Write};
use std::io::{ErrorKind, Read, Seek, Write};
use std::rc::Rc;
pub struct UnixIO {
@@ -36,7 +37,7 @@ impl IO for UnixIO {
trace!("open_file(path = {})", path);
let file = std::fs::File::options()
.read(true)
.custom_flags(libc::O_NONBLOCK)
.custom_flags(OFlags::NONBLOCK.bits() as i32)
.write(true)
.create(matches!(flags, OpenFlags::Create))
.open(path)?;
@@ -86,8 +87,8 @@ impl IO for UnixIO {
}
}
};
match result {
std::result::Result::Ok(n) => {
return match result {
Ok(n) => {
match &cf {
CompletionCallback::Read(_, ref c, _) => {
c.complete(0);
@@ -96,12 +97,10 @@ impl IO for UnixIO {
c.complete(n as i32);
}
}
return Ok(());
Ok(())
}
Err(e) => {
return Err(e.into());
}
}
Err(e) => Err(e.into()),
};
}
}
Ok(())
@@ -130,61 +129,47 @@ enum CompletionCallback {
pub struct UnixFile {
file: Rc<RefCell<std::fs::File>>,
poller: Rc<RefCell<polling::Poller>>,
poller: Rc<RefCell<Poller>>,
callbacks: Rc<RefCell<HashMap<usize, CompletionCallback>>>,
}
impl File for UnixFile {
fn lock_file(&self, exclusive: bool) -> Result<()> {
let fd = self.file.borrow().as_raw_fd();
let flock = flock {
l_type: if exclusive {
libc::F_WRLCK as c_short
} else {
libc::F_RDLCK as c_short
},
l_whence: libc::SEEK_SET as c_short,
l_start: 0,
l_len: 0, // Lock entire file
l_pid: 0,
};
let fd = self.file.borrow();
let fd = fd.as_fd();
// F_SETLK is a non-blocking lock. The lock will be released when the file is closed
// or the process exits or after an explicit unlock.
let lock_result = unsafe { fcntl(fd, F_SETLK, &flock) };
if lock_result == -1 {
let err = std::io::Error::last_os_error();
if err.kind() == std::io::ErrorKind::WouldBlock {
return Err(LimboError::LockingError(
"Failed locking file. File is locked by another process".to_string(),
));
fs::fcntl_lock(
fd,
if exclusive {
FlockOperation::NonBlockingLockExclusive
} else {
return Err(LimboError::LockingError(format!(
"Failed locking file, {}",
err
)));
}
}
FlockOperation::NonBlockingLockShared
},
)
.map_err(|e| {
let io_error = std::io::Error::from(e);
let message = match io_error.kind() {
ErrorKind::WouldBlock => {
"Failed locking file. File is locked by another process".to_string()
}
_ => format!("Failed locking file, {}", io_error),
};
LimboError::LockingError(message)
})?;
Ok(())
}
fn unlock_file(&self) -> Result<()> {
let fd = self.file.borrow().as_raw_fd();
let flock = flock {
l_type: libc::F_UNLCK as c_short,
l_whence: libc::SEEK_SET as c_short,
l_start: 0,
l_len: 0,
l_pid: 0,
};
let unlock_result = unsafe { fcntl(fd, F_SETLK, &flock) };
if unlock_result == -1 {
return Err(LimboError::LockingError(format!(
let fd = self.file.borrow();
let fd = fd.as_fd();
fs::fcntl_lock(fd, FlockOperation::NonBlockingUnlock).map_err(|e| {
LimboError::LockingError(format!(
"Failed to release file lock: {}",
std::io::Error::last_os_error()
)));
}
std::io::Error::from(e)
))
})?;
Ok(())
}
@@ -199,7 +184,7 @@ impl File for UnixFile {
rustix::io::pread(file.as_fd(), buf.as_mut_slice(), pos as u64)
};
match result {
std::result::Result::Ok(n) => {
Ok(n) => {
trace!("pread n: {}", n);
// Read succeeded immediately
c.complete(0);
@@ -236,7 +221,7 @@ impl File for UnixFile {
rustix::io::pwrite(file.as_fd(), buf.as_slice(), pos as u64)
};
match result {
std::result::Result::Ok(n) => {
Ok(n) => {
trace!("pwrite n: {}", n);
// Read succeeded immediately
c.complete(n as i32);
@@ -263,9 +248,9 @@ impl File for UnixFile {
fn sync(&self, c: Rc<Completion>) -> Result<()> {
let file = self.file.borrow();
let result = rustix::fs::fsync(file.as_fd());
let result = fs::fsync(file.as_fd());
match result {
std::result::Result::Ok(()) => {
Ok(()) => {
trace!("fsync");
c.complete(0);
Ok(())
@@ -276,7 +261,7 @@ impl File for UnixFile {
fn size(&self) -> Result<u64> {
let file = self.file.borrow();
Ok(file.metadata().unwrap().len())
Ok(file.metadata()?.len())
}
}

View File

@@ -4,6 +4,7 @@ mod function;
mod io;
#[cfg(feature = "json")]
mod json;
mod parameters;
mod pseudo;
mod result;
mod schema;
@@ -18,12 +19,17 @@ mod vdbe;
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
use fallible_iterator::FallibleIterator;
#[cfg(not(target_family = "wasm"))]
use libloading::{Library, Symbol};
#[cfg(not(target_family = "wasm"))]
use limbo_extension::{ExtensionApi, ExtensionEntryPoint, RESULT_OK};
use log::trace;
use schema::Schema;
use sqlite3_parser::ast;
use sqlite3_parser::{ast::Cmd, lexer::sql::Parser};
use std::cell::Cell;
use std::collections::HashMap;
use std::num::NonZero;
use std::sync::{Arc, OnceLock, RwLock};
use std::{cell::RefCell, rc::Rc};
use storage::btree::btree_init_page;
@@ -34,13 +40,12 @@ use storage::pager::allocate_page;
use storage::sqlite3_ondisk::{DatabaseHeader, DATABASE_HEADER_SIZE};
pub use storage::wal::WalFile;
pub use storage::wal::WalFileShared;
pub use types::Value;
use util::parse_schema_rows;
use translate::select::prepare_select_plan;
use types::OwnedValue;
pub use error::LimboError;
pub type Result<T> = std::result::Result<T, error::LimboError>;
use translate::select::prepare_select_plan;
pub type Result<T, E = error::LimboError> = std::result::Result<T, E>;
use crate::translate::optimizer::optimize_plan;
pub use io::OpenFlags;
@@ -56,8 +61,6 @@ pub use storage::pager::Page;
pub use storage::pager::Pager;
pub use storage::wal::CheckpointStatus;
pub use storage::wal::Wal;
pub use types::Value;
pub static DATABASE_VERSION: OnceLock<String> = OnceLock::new();
#[derive(Clone)]
@@ -127,7 +130,7 @@ impl Database {
let header = db_header;
let schema = Rc::new(RefCell::new(Schema::new()));
let syms = Rc::new(RefCell::new(SymbolTable::new()));
let mut db = Database {
let db = Database {
pager: pager.clone(),
schema: schema.clone(),
header: header.clone(),
@@ -135,11 +138,10 @@ impl Database {
_shared_wal: shared_wal.clone(),
syms,
};
ext::init(&mut db);
let db = Arc::new(db);
let conn = Rc::new(Connection {
db: db.clone(),
pager: pager,
pager,
schema: schema.clone(),
header,
transaction_state: RefCell::new(TransactionState::None),
@@ -169,16 +171,40 @@ impl Database {
pub fn define_scalar_function<S: AsRef<str>>(
&self,
name: S,
func: impl Fn(&[Value]) -> Result<OwnedValue> + 'static,
func: limbo_extension::ScalarFunction,
) {
let func = function::ExternalFunc {
name: name.as_ref().to_string(),
func: Box::new(func),
func,
};
self.syms
.borrow_mut()
.functions
.insert(name.as_ref().to_string(), Rc::new(func));
.insert(name.as_ref().to_string(), func.into());
}
#[cfg(not(target_family = "wasm"))]
pub fn load_extension<P: AsRef<std::ffi::OsStr>>(&self, path: P) -> Result<()> {
let api = Box::new(self.build_limbo_extension());
let lib =
unsafe { Library::new(path).map_err(|e| LimboError::ExtensionError(e.to_string()))? };
let entry: Symbol<ExtensionEntryPoint> = unsafe {
lib.get(b"register_extension")
.map_err(|e| LimboError::ExtensionError(e.to_string()))?
};
let api_ptr: *const ExtensionApi = Box::into_raw(api);
let result_code = entry(api_ptr);
if result_code == RESULT_OK {
self.syms.borrow_mut().extensions.push((lib, api_ptr));
Ok(())
} else {
if !api_ptr.is_null() {
let _ = unsafe { Box::from_raw(api_ptr.cast_mut()) };
}
Err(LimboError::ExtensionError(
"Extension registration failed".to_string(),
))
}
}
}
@@ -274,53 +300,65 @@ impl Connection {
pub fn query(self: &Rc<Connection>, sql: impl Into<String>) -> Result<Option<Rows>> {
let sql = sql.into();
trace!("Querying: {}", sql);
let db = self.db.clone();
let syms: &SymbolTable = &db.syms.borrow();
let mut parser = Parser::new(sql.as_bytes());
let cmd = parser.next()?;
if let Some(cmd) = cmd {
match cmd {
Cmd::Stmt(stmt) => {
let program = Rc::new(translate::translate(
&self.schema.borrow(),
stmt,
self.header.clone(),
self.pager.clone(),
Rc::downgrade(self),
syms,
)?);
let stmt = Statement::new(program, self.pager.clone());
Ok(Some(Rows { stmt }))
}
Cmd::Explain(stmt) => {
let program = translate::translate(
&self.schema.borrow(),
stmt,
self.header.clone(),
self.pager.clone(),
Rc::downgrade(self),
syms,
)?;
program.explain();
Ok(None)
}
Cmd::ExplainQueryPlan(stmt) => {
match stmt {
ast::Stmt::Select(select) => {
let mut plan = prepare_select_plan(&self.schema.borrow(), *select)?;
optimize_plan(&mut plan)?;
println!("{}", plan);
}
_ => todo!(),
}
Ok(None)
}
}
} else {
Ok(None)
match cmd {
Some(cmd) => self.run_cmd(cmd),
None => Ok(None),
}
}
pub(crate) fn run_cmd(self: &Rc<Connection>, cmd: Cmd) -> Result<Option<Rows>> {
let db = self.db.clone();
let syms: &SymbolTable = &db.syms.borrow();
match cmd {
Cmd::Stmt(stmt) => {
let program = Rc::new(translate::translate(
&self.schema.borrow(),
stmt,
self.header.clone(),
self.pager.clone(),
Rc::downgrade(self),
syms,
)?);
let stmt = Statement::new(program, self.pager.clone());
Ok(Some(Rows { stmt }))
}
Cmd::Explain(stmt) => {
let program = translate::translate(
&self.schema.borrow(),
stmt,
self.header.clone(),
self.pager.clone(),
Rc::downgrade(self),
syms,
)?;
program.explain();
Ok(None)
}
Cmd::ExplainQueryPlan(stmt) => {
match stmt {
ast::Stmt::Select(select) => {
let mut plan = prepare_select_plan(
&self.schema.borrow(),
*select,
&self.db.syms.borrow(),
)?;
optimize_plan(&mut plan)?;
println!("{}", plan);
}
_ => todo!(),
}
Ok(None)
}
}
}
pub fn query_runner<'a>(self: &'a Rc<Connection>, sql: &'a [u8]) -> QueryRunner<'a> {
QueryRunner::new(self, sql)
}
pub fn execute(self: &Rc<Connection>, sql: impl Into<String>) -> Result<()> {
let sql = sql.into();
let db = self.db.clone();
@@ -350,6 +388,7 @@ impl Connection {
Rc::downgrade(self),
syms,
)?;
let mut state = vdbe::ProgramState::new(program.max_registers);
program.step(&mut state, self.pager.clone())?;
}
@@ -372,6 +411,11 @@ impl Connection {
Ok(())
}
#[cfg(not(target_family = "wasm"))]
pub fn load_extension<P: AsRef<std::ffi::OsStr>>(&self, path: P) -> Result<()> {
Database::load_extension(self.db.as_ref(), path)
}
/// Close a connection and checkpoint.
pub fn close(&self) -> Result<()> {
loop {
@@ -432,7 +476,18 @@ impl Statement {
Ok(Rows::new(stmt))
}
pub fn reset(&self) {}
pub fn parameters(&self) -> &parameters::Parameters {
&self.program.parameters
}
pub fn bind_at(&mut self, index: NonZero<usize>, value: Value) {
self.state.bind_at(index, value.into());
}
pub fn reset(&mut self) {
let state = vdbe::ProgramState::new(self.program.max_registers);
self.state = state
}
}
pub enum StepResult<'a> {
@@ -468,15 +523,54 @@ impl Rows {
}
}
#[derive(Debug)]
pub(crate) struct SymbolTable {
pub functions: HashMap<String, Rc<crate::function::ExternalFunc>>,
#[cfg(not(target_family = "wasm"))]
extensions: Vec<(libloading::Library, *const ExtensionApi)>,
}
impl std::fmt::Debug for SymbolTable {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SymbolTable")
.field("functions", &self.functions)
.finish()
}
}
/// Returns `true` when `path` ends in a shared-library extension
/// (`.so`, `.dylib` or `.dll`); paths without an extension yield `false`.
fn is_shared_library(path: &std::path::Path) -> bool {
    // `extension()` is `None` for extension-less paths, so those are
    // correctly rejected here.
    path.extension()
        .is_some_and(|ext| ext == "so" || ext == "dylib" || ext == "dll")
}
/// Resolve a user-supplied extension path to an existing file on disk.
///
/// If `extpath` exists as given it is returned unchanged. Otherwise, when it
/// does not already name a shared library explicitly, one retry is made with
/// the platform's native dynamic-library extension
/// (`std::env::consts::DLL_EXTENSION`) appended.
///
/// # Errors
/// Returns `LimboError::ExtensionError` when no candidate file exists.
pub fn resolve_ext_path(extpath: &str) -> Result<std::path::PathBuf> {
    let path = std::path::Path::new(extpath);
    if path.exists() {
        return Ok(path.to_path_buf());
    }
    // The caller already spelled out a shared-library suffix, so there is
    // nothing sensible to retry with.
    if is_shared_library(path) {
        return Err(LimboError::ExtensionError(format!(
            "Extension file not found: {extpath}"
        )));
    }
    // Retry with the platform-native dynamic-library extension; only build
    // the error lazily when that candidate is missing too.
    let candidate = path.with_extension(std::env::consts::DLL_EXTENSION);
    if candidate.exists() {
        Ok(candidate)
    } else {
        Err(LimboError::ExtensionError(format!(
            "Extension file not found: {extpath}"
        )))
    }
}
impl SymbolTable {
/// Construct an empty symbol table with no registered functions and
/// (on non-wasm targets) no loaded extension libraries.
pub fn new() -> Self {
    Self {
        functions: HashMap::new(),
        // TODO: wasm libs will be very different
        #[cfg(not(target_family = "wasm"))]
        extensions: Vec::new(),
    }
}
@@ -488,3 +582,29 @@ impl SymbolTable {
self.functions.get(name).cloned()
}
}
/// Iterator over the statements of a multi-statement SQL string,
/// executing each one lazily against a connection as it is pulled.
pub struct QueryRunner<'a> {
    // Incremental SQL parser over the caller-supplied byte slice.
    parser: Parser<'a>,
    // Connection every parsed command is executed against.
    conn: &'a Rc<Connection>,
}

impl<'a> QueryRunner<'a> {
    /// Wrap `statements` (raw SQL bytes, possibly several `;`-separated
    /// statements) for lazy execution on `conn`.
    pub(crate) fn new(conn: &'a Rc<Connection>, statements: &'a [u8]) -> Self {
        Self {
            parser: Parser::new(statements),
            conn,
        }
    }
}
impl Iterator for QueryRunner<'_> {
    type Item = Result<Option<Rows>>;

    /// Parse the next statement and run it on the connection.
    ///
    /// Yields `Some(Err(..))` on a parse error, `Some(Ok(..))` with the
    /// command's result otherwise, and `None` once the input is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        match self.parser.next() {
            Err(err) => Some(Err(LimboError::from(err))),
            Ok(parsed) => parsed.map(|cmd| self.conn.run_cmd(cmd)),
        }
    }
}

111
core/parameters.rs Normal file
View File

@@ -0,0 +1,111 @@
use std::num::NonZero;
/// A single SQL bind-parameter occurrence, tagged with its 1-based
/// variable index.
#[derive(Clone, Debug)]
pub enum Parameter {
    /// A bare `?` placeholder.
    Anonymous(NonZero<usize>),
    /// An explicitly numbered `?NNN` placeholder.
    Indexed(NonZero<usize>),
    /// A named placeholder such as `:name`, `$name`, `@name` or `#name`.
    Named(String, NonZero<usize>),
}

impl PartialEq for Parameter {
    /// Two occurrences compare equal when they bind to the same variable
    /// index, regardless of how they were written in the SQL text.
    fn eq(&self, other: &Self) -> bool {
        self.index() == other.index()
    }
}

impl Parameter {
    /// The 1-based bind index of this parameter occurrence.
    pub fn index(&self) -> NonZero<usize> {
        match self {
            Self::Anonymous(i) | Self::Indexed(i) | Self::Named(_, i) => *i,
        }
    }
}
/// Collected bind parameters of a prepared statement, in order of
/// appearance, plus the next free variable index.
#[derive(Debug)]
pub struct Parameters {
    // Next index to hand out for a fresh (non-reused) parameter.
    index: NonZero<usize>,
    // Every parameter occurrence in source order; a reused named or
    // numbered parameter appears multiple times with the same index.
    pub list: Vec<Parameter>,
}
impl Parameters {
    /// Create an empty parameter collection; indices are handed out
    /// starting from 1, matching SQLite's bind-parameter numbering.
    pub fn new() -> Self {
        Self {
            index: 1.try_into().unwrap(),
            list: vec![],
        }
    }

    /// Number of *distinct* bind variables; reused named or numbered
    /// parameters count once.
    pub fn count(&self) -> usize {
        // `Vec::dedup` removes only *consecutive* duplicates, so sort the
        // indices first. Without the sort, `?1, :a, ?1` would be counted
        // as three distinct parameters instead of two.
        let mut indices: Vec<NonZero<usize>> =
            self.list.iter().map(Parameter::index).collect();
        indices.sort_unstable();
        indices.dedup();
        indices.len()
    }

    /// Human-readable name of the parameter bound at `index`: `"?"` for
    /// anonymous, `"?NNN"` for numbered, or the original name (including
    /// its sigil) for named parameters. `None` if no occurrence uses
    /// `index`.
    pub fn name(&self, index: NonZero<usize>) -> Option<String> {
        self.list.iter().find_map(|p| match p {
            Parameter::Anonymous(i) if *i == index => Some("?".to_string()),
            Parameter::Indexed(i) if *i == index => Some(format!("?{i}")),
            Parameter::Named(name, i) if *i == index => Some(name.to_owned()),
            _ => None,
        })
    }

    /// Bind index previously assigned to the named parameter `name`, if any.
    pub fn index(&self, name: impl AsRef<str>) -> Option<NonZero<usize>> {
        self.list
            .iter()
            .find_map(|p| match p {
                Parameter::Named(n, index) if n == name.as_ref() => Some(index),
                _ => None,
            })
            .copied()
    }

    /// Hand out the next free index and advance the counter.
    pub fn next_index(&mut self) -> NonZero<usize> {
        let index = self.index;
        self.index = self.index.checked_add(1).unwrap();
        index
    }

    /// Record a parameter occurrence as encountered by the parser and
    /// return the index it binds to.
    ///
    /// * `""` — an anonymous `?`; always gets a fresh index.
    /// * `$x` / `:x` / `@x` / `#x` — a named parameter; reuses the index of
    ///   an earlier occurrence with the same name, otherwise gets a fresh one.
    /// * anything else — a numbered `?NNN`; the counter is bumped past `NNN`
    ///   so later fresh parameters never collide with it.
    pub fn push(&mut self, name: impl AsRef<str>) -> NonZero<usize> {
        match name.as_ref() {
            "" => {
                let index = self.next_index();
                self.list.push(Parameter::Anonymous(index));
                log::trace!("anonymous parameter at {index}");
                index
            }
            name if name.starts_with(&['$', ':', '@', '#']) => {
                match self
                    .list
                    .iter()
                    .find(|p| matches!(p, Parameter::Named(n, _) if name == n))
                {
                    Some(t) => {
                        let index = t.index();
                        self.list.push(t.clone());
                        log::trace!("named parameter at {index} as {name}");
                        index
                    }
                    None => {
                        let index = self.next_index();
                        self.list.push(Parameter::Named(name.to_owned(), index));
                        log::trace!("named parameter at {index} as {name}");
                        index
                    }
                }
            }
            index => {
                // The parser guarantees a positive integer here, so the
                // NonZero parse cannot fail.
                let index: NonZero<usize> = index.parse().unwrap();
                if index > self.index {
                    self.index = index.checked_add(1).unwrap();
                }
                self.list.push(Parameter::Indexed(index));
                log::trace!("indexed parameter at {index}");
                index
            }
        }
    }
}

View File

@@ -1245,7 +1245,6 @@ pub fn begin_write_wal_frame(
*write_counter.borrow_mut() += 1;
let write_complete = {
let buf_copy = buffer.clone();
log::info!("finished");
Box::new(move |bytes_written: i32| {
let buf_copy = buf_copy.clone();
let buf_len = buf_copy.borrow().len();

View File

@@ -3,26 +3,23 @@ use crate::translate::emitter::emit_program;
use crate::translate::optimizer::optimize_plan;
use crate::translate::plan::{DeletePlan, Plan, SourceOperator};
use crate::translate::planner::{parse_limit, parse_where};
use crate::{schema::Schema, storage::sqlite3_ondisk::DatabaseHeader, vdbe::Program};
use crate::{Connection, Result, SymbolTable};
use crate::vdbe::builder::ProgramBuilder;
use crate::{schema::Schema, Result, SymbolTable};
use sqlite3_parser::ast::{Expr, Limit, QualifiedName};
use std::rc::Weak;
use std::{cell::RefCell, rc::Rc};
use super::plan::{TableReference, TableReferenceType};
pub fn translate_delete(
program: &mut ProgramBuilder,
schema: &Schema,
tbl_name: &QualifiedName,
where_clause: Option<Expr>,
limit: Option<Box<Limit>>,
database_header: Rc<RefCell<DatabaseHeader>>,
connection: Weak<Connection>,
syms: &SymbolTable,
) -> Result<Program> {
) -> Result<()> {
let mut delete_plan = prepare_delete_plan(schema, tbl_name, where_clause, limit)?;
optimize_plan(&mut delete_plan)?;
emit_program(database_header, delete_plan, connection, syms)
emit_program(program, delete_plan, syms)
}
pub fn prepare_delete_plan(

View File

@@ -1,19 +1,16 @@
// This module contains code for emitting bytecode instructions for SQL query execution.
// It handles translating high-level SQL operations into low-level bytecode that can be executed by the virtual machine.
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::{Rc, Weak};
use sqlite3_parser::ast::{self};
use crate::function::Func;
use crate::storage::sqlite3_ondisk::DatabaseHeader;
use crate::translate::plan::{DeletePlan, Plan, Search};
use crate::util::exprs_are_equivalent;
use crate::vdbe::builder::ProgramBuilder;
use crate::vdbe::{insn::Insn, BranchOffset, Program};
use crate::{Connection, Result, SymbolTable};
use crate::vdbe::{insn::Insn, BranchOffset};
use crate::{Result, SymbolTable};
use super::aggregation::emit_ungrouped_aggregation;
use super::group_by::{emit_group_by, init_group_by, GroupByMetadata};
@@ -38,14 +35,13 @@ impl<'a> Resolver<'a> {
}
pub fn resolve_function(&self, func_name: &str, arg_count: usize) -> Option<Func> {
let func_type = match Func::resolve_function(&func_name, arg_count).ok() {
match Func::resolve_function(func_name, arg_count).ok() {
Some(func) => Some(func),
None => self
.symbol_table
.resolve_function(&func_name, arg_count)
.map(|func| Func::External(func)),
};
func_type
.resolve_function(func_name, arg_count)
.map(|arg| Func::External(arg.clone())),
}
}
pub fn resolve_cached_expr_reg(&self, expr: &ast::Expr) -> Option<usize> {
@@ -99,9 +95,9 @@ pub enum OperationMode {
/// Initialize the program with basic setup and return initial metadata and labels
fn prologue<'a>(
program: &mut ProgramBuilder,
syms: &'a SymbolTable,
) -> Result<(ProgramBuilder, TranslateCtx<'a>, BranchOffset, BranchOffset)> {
let mut program = ProgramBuilder::new();
) -> Result<(TranslateCtx<'a>, BranchOffset, BranchOffset)> {
let init_label = program.allocate_label();
program.emit_insn(Insn::Init {
@@ -124,7 +120,7 @@ fn prologue<'a>(
resolver: Resolver::new(syms),
};
Ok((program, t_ctx, init_label, start_offset))
Ok((t_ctx, init_label, start_offset))
}
/// Clean up and finalize the program, resolving any remaining labels
@@ -153,41 +149,34 @@ fn epilogue(
/// Main entry point for emitting bytecode for a SQL query
/// Takes a query plan and generates the corresponding bytecode program
pub fn emit_program(
database_header: Rc<RefCell<DatabaseHeader>>,
plan: Plan,
connection: Weak<Connection>,
syms: &SymbolTable,
) -> Result<Program> {
pub fn emit_program(program: &mut ProgramBuilder, plan: Plan, syms: &SymbolTable) -> Result<()> {
match plan {
Plan::Select(plan) => emit_program_for_select(database_header, plan, connection, syms),
Plan::Delete(plan) => emit_program_for_delete(database_header, plan, connection, syms),
Plan::Select(plan) => emit_program_for_select(program, plan, syms),
Plan::Delete(plan) => emit_program_for_delete(program, plan, syms),
}
}
fn emit_program_for_select(
database_header: Rc<RefCell<DatabaseHeader>>,
program: &mut ProgramBuilder,
mut plan: SelectPlan,
connection: Weak<Connection>,
syms: &SymbolTable,
) -> Result<Program> {
let (mut program, mut t_ctx, init_label, start_offset) = prologue(syms)?;
) -> Result<()> {
let (mut t_ctx, init_label, start_offset) = prologue(program, syms)?;
// Trivial exit on LIMIT 0
if let Some(limit) = plan.limit {
if limit == 0 {
epilogue(&mut program, init_label, start_offset)?;
return Ok(program.build(database_header, connection));
epilogue(program, init_label, start_offset)?;
}
}
// Emit main parts of query
emit_query(&mut program, &mut plan, &mut t_ctx)?;
emit_query(program, &mut plan, &mut t_ctx)?;
// Finalize program
epilogue(&mut program, init_label, start_offset)?;
epilogue(program, init_label, start_offset)?;
Ok(program.build(database_header, connection))
Ok(())
}
pub fn emit_query<'a>(
@@ -263,12 +252,11 @@ pub fn emit_query<'a>(
}
fn emit_program_for_delete(
database_header: Rc<RefCell<DatabaseHeader>>,
program: &mut ProgramBuilder,
mut plan: DeletePlan,
connection: Weak<Connection>,
syms: &SymbolTable,
) -> Result<Program> {
let (mut program, mut t_ctx, init_label, start_offset) = prologue(syms)?;
) -> Result<()> {
let (mut t_ctx, init_label, start_offset) = prologue(program, syms)?;
// No rows will be read from source table loops if there is a constant false condition eg. WHERE 0
let after_main_loop_label = program.allocate_label();
@@ -279,32 +267,27 @@ fn emit_program_for_delete(
}
// Initialize cursors and other resources needed for query execution
init_loop(
&mut program,
&mut t_ctx,
&plan.source,
&OperationMode::DELETE,
)?;
init_loop(program, &mut t_ctx, &plan.source, &OperationMode::DELETE)?;
// Set up main query execution loop
open_loop(
&mut program,
program,
&mut t_ctx,
&mut plan.source,
&plan.referenced_tables,
)?;
emit_delete_insns(&mut program, &mut t_ctx, &plan.source, &plan.limit)?;
emit_delete_insns(program, &mut t_ctx, &plan.source, &plan.limit)?;
// Clean up and close the main execution loop
close_loop(&mut program, &mut t_ctx, &plan.source)?;
close_loop(program, &mut t_ctx, &plan.source)?;
program.resolve_label(after_main_loop_label, program.offset());
// Finalize program
epilogue(&mut program, init_label, start_offset)?;
epilogue(program, init_label, start_offset)?;
Ok(program.build(database_header, connection))
Ok(())
}
fn emit_delete_insns<'a>(

View File

@@ -1,7 +1,5 @@
use sqlite3_parser::ast::{self, UnaryOperator};
#[cfg(feature = "uuid")]
use crate::ext::{ExtFunc, UuidFunc};
#[cfg(feature = "json")]
use crate::function::JsonFunc;
use crate::function::{Func, FuncCtx, MathFuncArity, ScalarFunc};
@@ -18,7 +16,6 @@ pub struct ConditionMetadata {
pub jump_if_condition_is_true: bool,
pub jump_target_when_true: BranchOffset,
pub jump_target_when_false: BranchOffset,
pub parent_op: Option<ast::Operator>,
}
fn emit_cond_jump(program: &mut ProgramBuilder, cond_meta: ConditionMetadata, reg: usize) {
@@ -157,87 +154,53 @@ pub fn translate_condition_expr(
match expr {
ast::Expr::Between { .. } => todo!(),
ast::Expr::Binary(lhs, ast::Operator::And, rhs) => {
// In a binary AND, never jump to the 'jump_target_when_true' label on the first condition, because
// the second condition must also be true.
let _ = translate_condition_expr(
// In a binary AND, never jump to the parent 'jump_target_when_true' label on the first condition, because
// the second condition MUST also be true. Instead we instruct the child expression to jump to a local
// true label.
let jump_target_when_true = program.allocate_label();
translate_condition_expr(
program,
referenced_tables,
lhs,
ConditionMetadata {
jump_if_condition_is_true: false,
// Mark that the parent op for sub-expressions is AND
parent_op: Some(ast::Operator::And),
jump_target_when_true,
..condition_metadata
},
resolver,
);
let _ = translate_condition_expr(
)?;
program.resolve_label(jump_target_when_true, program.offset());
translate_condition_expr(
program,
referenced_tables,
rhs,
condition_metadata,
resolver,
)?;
}
ast::Expr::Binary(lhs, ast::Operator::Or, rhs) => {
// In a binary OR, never jump to the parent 'jump_target_when_false' label on the first condition, because
// the second condition CAN also be true. Instead we instruct the child expression to jump to a local
// false label.
let jump_target_when_false = program.allocate_label();
translate_condition_expr(
program,
referenced_tables,
lhs,
ConditionMetadata {
parent_op: Some(ast::Operator::And),
jump_if_condition_is_true: true,
jump_target_when_false,
..condition_metadata
},
resolver,
);
}
ast::Expr::Binary(lhs, ast::Operator::Or, rhs) => {
if matches!(condition_metadata.parent_op, Some(ast::Operator::And)) {
// we are inside a bigger AND expression, so we do NOT jump to parent's 'true' if LHS or RHS is true.
// we only short-circuit the parent's false label if LHS and RHS are both false.
let local_true_label = program.allocate_label();
let local_false_label = program.allocate_label();
// evaluate LHS in normal OR fashion, short-circuit local if true
let lhs_metadata = ConditionMetadata {
jump_if_condition_is_true: true,
jump_target_when_true: local_true_label,
jump_target_when_false: local_false_label,
parent_op: Some(ast::Operator::Or),
};
translate_condition_expr(program, referenced_tables, lhs, lhs_metadata, resolver)?;
// if lhs was false, we land here:
program.resolve_label(local_false_label, program.offset());
// evaluate rhs with normal OR: short-circuit if true, go to local_true
let rhs_metadata = ConditionMetadata {
jump_if_condition_is_true: true,
jump_target_when_true: local_true_label,
jump_target_when_false: condition_metadata.jump_target_when_false,
// if rhs is also false => parent's false
parent_op: Some(ast::Operator::Or),
};
translate_condition_expr(program, referenced_tables, rhs, rhs_metadata, resolver)?;
// if we get here, both lhs+rhs are false: explicit jump to parent's false
program.emit_insn(Insn::Goto {
target_pc: condition_metadata.jump_target_when_false,
});
// local_true: we do not jump to parent's "true" label because the parent is AND,
// so we want to keep evaluating the rest
program.resolve_label(local_true_label, program.offset());
} else {
let jump_target_when_false = program.allocate_label();
let lhs_metadata = ConditionMetadata {
jump_if_condition_is_true: true,
jump_target_when_false,
parent_op: Some(ast::Operator::Or),
..condition_metadata
};
translate_condition_expr(program, referenced_tables, lhs, lhs_metadata, resolver)?;
// if LHS was false, we land here:
program.resolve_label(jump_target_when_false, program.offset());
let rhs_metadata = ConditionMetadata {
parent_op: Some(ast::Operator::Or),
..condition_metadata
};
translate_condition_expr(program, referenced_tables, rhs, rhs_metadata, resolver)?;
}
)?;
program.resolve_label(jump_target_when_false, program.offset());
translate_condition_expr(
program,
referenced_tables,
rhs,
condition_metadata,
resolver,
)?;
}
ast::Expr::Binary(lhs, op, rhs) => {
let lhs_reg = translate_and_mark(program, Some(referenced_tables), lhs, resolver)?;
@@ -633,6 +596,20 @@ pub fn translate_expr(
dest: target_register,
});
}
ast::Operator::RightShift => {
program.emit_insn(Insn::ShiftRight {
lhs: e1_reg,
rhs: e2_reg,
dest: target_register,
});
}
ast::Operator::LeftShift => {
program.emit_insn(Insn::ShiftLeft {
lhs: e1_reg,
rhs: e2_reg,
dest: target_register,
});
}
#[cfg(feature = "json")]
op @ (ast::Operator::ArrowRight | ast::Operator::ArrowRightShift) => {
let json_func = match op {
@@ -782,13 +759,23 @@ pub fn translate_expr(
crate::bail_parse_error!("aggregation function in non-aggregation context")
}
Func::External(_) => {
let regs = program.alloc_register();
let regs = program.alloc_registers(args_count);
for (i, arg_expr) in args.iter().enumerate() {
translate_expr(
program,
referenced_tables,
&arg_expr[i],
regs + i,
resolver,
)?;
}
program.emit_insn(Insn::Function {
constant_mask: 0,
start_reg: regs,
dest: target_register,
func: func_ctx,
});
Ok(target_register)
}
#[cfg(feature = "json")]
@@ -1114,6 +1101,19 @@ pub fn translate_expr(
});
Ok(target_register)
}
#[cfg(not(target_family = "wasm"))]
ScalarFunc::LoadExtension => {
let args = expect_arguments_exact!(args, 1, srf);
let reg =
translate_and_mark(program, referenced_tables, &args[0], resolver)?;
program.emit_insn(Insn::Function {
constant_mask: 0,
start_reg: reg,
dest: target_register,
func: func_ctx,
});
Ok(target_register)
}
ScalarFunc::Random => {
if args.is_some() {
crate::bail_parse_error!(
@@ -1458,60 +1458,6 @@ pub fn translate_expr(
}
}
}
Func::Extension(ext_func) => match ext_func {
#[cfg(feature = "uuid")]
ExtFunc::Uuid(ref uuid_fn) => match uuid_fn {
UuidFunc::UuidStr | UuidFunc::UuidBlob | UuidFunc::Uuid7TS => {
let args = expect_arguments_exact!(args, 1, ext_func);
let regs = program.alloc_register();
translate_expr(program, referenced_tables, &args[0], regs, resolver)?;
program.emit_insn(Insn::Function {
constant_mask: 0,
start_reg: regs,
dest: target_register,
func: func_ctx,
});
Ok(target_register)
}
UuidFunc::Uuid4Str => {
if args.is_some() {
crate::bail_parse_error!(
"{} function with arguments",
ext_func.to_string()
);
}
let regs = program.alloc_register();
program.emit_insn(Insn::Function {
constant_mask: 0,
start_reg: regs,
dest: target_register,
func: func_ctx,
});
Ok(target_register)
}
UuidFunc::Uuid7 => {
let args = expect_arguments_max!(args, 1, ext_func);
let mut start_reg = None;
if let Some(arg) = args.first() {
start_reg = Some(translate_and_mark(
program,
referenced_tables,
arg,
resolver,
)?);
}
program.emit_insn(Insn::Function {
constant_mask: 0,
start_reg: start_reg.unwrap_or(target_register),
dest: target_register,
func: func_ctx,
});
Ok(target_register)
}
},
#[allow(unreachable_patterns)]
_ => unreachable!("{ext_func} not implemented yet"),
},
Func::Math(math_func) => match math_func.arity() {
MathFuncArity::Nullary => {
if args.is_some() {
@@ -1621,6 +1567,15 @@ pub fn translate_expr(
}
}
}
ast::Expr::RowId { database: _, table } => {
let tbl_ref = referenced_tables.as_ref().unwrap().get(*table).unwrap();
let cursor_id = program.resolve_cursor_id(&tbl_ref.table_identifier);
program.emit_insn(Insn::RowId {
cursor_id,
dest: target_register,
});
Ok(target_register)
}
ast::Expr::InList { .. } => todo!(),
ast::Expr::InSelect { .. } => todo!(),
ast::Expr::InTable { .. } => todo!(),
@@ -1727,7 +1682,6 @@ pub fn translate_expr(
dest: target_register,
});
}
program.mark_last_insn_constant();
Ok(target_register)
}
(UnaryOperator::Negative | UnaryOperator::Positive, _) => {
@@ -1766,7 +1720,6 @@ pub fn translate_expr(
dest: target_register,
});
}
program.mark_last_insn_constant();
Ok(target_register)
}
(UnaryOperator::BitwiseNot, ast::Expr::Literal(ast::Literal::Null)) => {
@@ -1774,7 +1727,6 @@ pub fn translate_expr(
dest: target_register,
dest_end: None,
});
program.mark_last_insn_constant();
Ok(target_register)
}
(UnaryOperator::BitwiseNot, _) => {
@@ -1788,7 +1740,14 @@ pub fn translate_expr(
}
_ => todo!(),
},
ast::Expr::Variable(_) => todo!(),
ast::Expr::Variable(name) => {
let index = program.parameters.push(name);
program.emit_insn(Insn::Variable {
index,
dest: target_register,
});
Ok(target_register)
}
}
}

View File

@@ -386,7 +386,6 @@ pub fn emit_group_by<'a>(
jump_if_condition_is_true: false,
jump_target_when_false: group_by_end_without_emitting_row_label,
jump_target_when_true: BranchOffset::Placeholder, // not used. FIXME: this is a bug. HAVING can have e.g. HAVING a OR b.
parent_op: None,
},
&t_ctx.resolver,
)?;

View File

@@ -1,5 +1,4 @@
use std::rc::Weak;
use std::{cell::RefCell, ops::Deref, rc::Rc};
use std::ops::Deref;
use sqlite3_parser::ast::{
DistinctNames, Expr, InsertBody, QualifiedName, ResolveType, ResultColumn, With,
@@ -9,23 +8,22 @@ use crate::error::SQLITE_CONSTRAINT_PRIMARYKEY;
use crate::schema::BTreeTable;
use crate::util::normalize_ident;
use crate::vdbe::BranchOffset;
use crate::Result;
use crate::{
schema::{Column, Schema},
storage::sqlite3_ondisk::DatabaseHeader,
translate::expr::translate_expr,
vdbe::{
builder::{CursorType, ProgramBuilder},
insn::Insn,
Program,
},
SymbolTable,
};
use crate::{Connection, Result};
use super::emitter::Resolver;
#[allow(clippy::too_many_arguments)]
pub fn translate_insert(
program: &mut ProgramBuilder,
schema: &Schema,
with: &Option<With>,
on_conflict: &Option<ResolveType>,
@@ -33,17 +31,14 @@ pub fn translate_insert(
columns: &Option<DistinctNames>,
body: &InsertBody,
_returning: &Option<Vec<ResultColumn>>,
database_header: Rc<RefCell<DatabaseHeader>>,
connection: Weak<Connection>,
syms: &SymbolTable,
) -> Result<Program> {
) -> Result<()> {
if with.is_some() {
crate::bail_parse_error!("WITH clause is not supported");
}
if on_conflict.is_some() {
crate::bail_parse_error!("ON CONFLICT clause is not supported");
}
let mut program = ProgramBuilder::new();
let resolver = Resolver::new(syms);
let init_label = program.allocate_label();
program.emit_insn(Insn::Init {
@@ -118,7 +113,7 @@ pub fn translate_insert(
for value in values {
populate_column_registers(
&mut program,
program,
value,
&column_mappings,
column_registers_start,
@@ -157,7 +152,7 @@ pub fn translate_insert(
program.emit_insn(Insn::OpenWriteAwait {});
populate_column_registers(
&mut program,
program,
&values[0],
&column_mappings,
column_registers_start,
@@ -262,7 +257,8 @@ pub fn translate_insert(
program.emit_insn(Insn::Goto {
target_pc: start_offset,
});
Ok(program.build(database_header, connection))
Ok(())
}
#[derive(Debug)]

View File

@@ -230,7 +230,6 @@ pub fn open_loop(
jump_if_condition_is_true: false,
jump_target_when_true,
jump_target_when_false: next,
parent_op: None,
};
translate_condition_expr(
program,
@@ -279,7 +278,6 @@ pub fn open_loop(
jump_if_condition_is_true: false,
jump_target_when_true,
jump_target_when_false,
parent_op: None,
};
for predicate in predicates.iter() {
translate_condition_expr(
@@ -352,7 +350,6 @@ pub fn open_loop(
jump_if_condition_is_true: false,
jump_target_when_true,
jump_target_when_false: next,
parent_op: None,
};
translate_condition_expr(
program,
@@ -537,7 +534,6 @@ pub fn open_loop(
jump_if_condition_is_true: false,
jump_target_when_true,
jump_target_when_false: next,
parent_op: None,
};
translate_condition_expr(
program,

View File

@@ -32,8 +32,7 @@ use crate::vdbe::{builder::ProgramBuilder, insn::Insn, Program};
use crate::{bail_parse_error, Connection, LimboError, Result, SymbolTable};
use insert::translate_insert;
use select::translate_select;
use sqlite3_parser::ast::fmt::ToTokens;
use sqlite3_parser::ast::{self, PragmaName};
use sqlite3_parser::ast::{self, fmt::ToTokens, PragmaName};
use std::cell::RefCell;
use std::fmt::Display;
use std::rc::{Rc, Weak};
@@ -48,6 +47,8 @@ pub fn translate(
connection: Weak<Connection>,
syms: &SymbolTable,
) -> Result<Program> {
let mut program = ProgramBuilder::new();
match stmt {
ast::Stmt::AlterTable(_, _) => bail_parse_error!("ALTER TABLE not supported yet"),
ast::Stmt::Analyze(_) => bail_parse_error!("ANALYZE not supported yet"),
@@ -64,14 +65,8 @@ pub fn translate(
if temporary {
bail_parse_error!("TEMPORARY table not supported yet");
}
translate_create_table(
tbl_name,
body,
if_not_exists,
database_header,
connection,
schema,
)
translate_create_table(&mut program, tbl_name, body, if_not_exists, schema)?;
}
ast::Stmt::CreateTrigger { .. } => bail_parse_error!("CREATE TRIGGER not supported yet"),
ast::Stmt::CreateView { .. } => bail_parse_error!("CREATE VIEW not supported yet"),
@@ -83,29 +78,23 @@ pub fn translate(
where_clause,
limit,
..
} => translate_delete(
schema,
&tbl_name,
where_clause,
limit,
database_header,
connection,
syms,
),
} => {
translate_delete(&mut program, schema, &tbl_name, where_clause, limit, syms)?;
}
ast::Stmt::Detach(_) => bail_parse_error!("DETACH not supported yet"),
ast::Stmt::DropIndex { .. } => bail_parse_error!("DROP INDEX not supported yet"),
ast::Stmt::DropTable { .. } => bail_parse_error!("DROP TABLE not supported yet"),
ast::Stmt::DropTrigger { .. } => bail_parse_error!("DROP TRIGGER not supported yet"),
ast::Stmt::DropView { .. } => bail_parse_error!("DROP VIEW not supported yet"),
ast::Stmt::Pragma(name, body) => {
translate_pragma(&name, body, database_header, pager, connection)
translate_pragma(&mut program, &name, body, database_header.clone(), pager)?;
}
ast::Stmt::Reindex { .. } => bail_parse_error!("REINDEX not supported yet"),
ast::Stmt::Release(_) => bail_parse_error!("RELEASE not supported yet"),
ast::Stmt::Rollback { .. } => bail_parse_error!("ROLLBACK not supported yet"),
ast::Stmt::Savepoint(_) => bail_parse_error!("SAVEPOINT not supported yet"),
ast::Stmt::Select(select) => {
translate_select(schema, *select, database_header, connection, syms)
translate_select(&mut program, schema, *select, syms)?;
}
ast::Stmt::Update { .. } => bail_parse_error!("UPDATE not supported yet"),
ast::Stmt::Vacuum(_, _) => bail_parse_error!("VACUUM not supported yet"),
@@ -116,19 +105,22 @@ pub fn translate(
columns,
body,
returning,
} => translate_insert(
schema,
&with,
&or_conflict,
&tbl_name,
&columns,
&body,
&returning,
database_header,
connection,
syms,
),
} => {
translate_insert(
&mut program,
schema,
&with,
&or_conflict,
&tbl_name,
&columns,
&body,
&returning,
syms,
)?;
}
}
Ok(program.build(database_header, connection))
}
/* Example:
@@ -378,14 +370,12 @@ fn check_automatic_pk_index_required(
}
fn translate_create_table(
program: &mut ProgramBuilder,
tbl_name: ast::QualifiedName,
body: ast::CreateTableBody,
if_not_exists: bool,
database_header: Rc<RefCell<DatabaseHeader>>,
connection: Weak<Connection>,
schema: &Schema,
) -> Result<Program> {
let mut program = ProgramBuilder::new();
) -> Result<()> {
if schema.get_table(tbl_name.name.0.as_str()).is_some() {
if if_not_exists {
let init_label = program.allocate_label();
@@ -403,7 +393,8 @@ fn translate_create_table(
program.emit_insn(Insn::Goto {
target_pc: start_offset,
});
return Ok(program.build(database_header, connection));
return Ok(());
}
bail_parse_error!("Table {} already exists", tbl_name);
}
@@ -453,7 +444,7 @@ fn translate_create_table(
// https://github.com/sqlite/sqlite/blob/95f6df5b8d55e67d1e34d2bff217305a2f21b1fb/src/build.c#L2856-L2871
// https://github.com/sqlite/sqlite/blob/95f6df5b8d55e67d1e34d2bff217305a2f21b1fb/src/build.c#L1334C5-L1336C65
let index_root_reg = check_automatic_pk_index_required(&body, &mut program, &tbl_name.name.0)?;
let index_root_reg = check_automatic_pk_index_required(&body, program, &tbl_name.name.0)?;
if let Some(index_root_reg) = index_root_reg {
program.emit_insn(Insn::CreateBtree {
db: 0,
@@ -476,7 +467,7 @@ fn translate_create_table(
// Add the table entry to sqlite_schema
emit_schema_entry(
&mut program,
program,
sqlite_schema_cursor_id,
SchemaEntryType::Table,
&tbl_name.name.0,
@@ -492,7 +483,7 @@ fn translate_create_table(
PRIMARY_KEY_AUTOMATIC_INDEX_NAME_PREFIX, tbl_name.name.0
);
emit_schema_entry(
&mut program,
program,
sqlite_schema_cursor_id,
SchemaEntryType::Index,
&index_name,
@@ -523,7 +514,8 @@ fn translate_create_table(
program.emit_insn(Insn::Goto {
target_pc: start_offset,
});
Ok(program.build(database_header, connection))
Ok(())
}
enum PrimaryKeyDefinitionType<'a> {
@@ -532,13 +524,12 @@ enum PrimaryKeyDefinitionType<'a> {
}
fn translate_pragma(
program: &mut ProgramBuilder,
name: &ast::QualifiedName,
body: Option<ast::PragmaBody>,
database_header: Rc<RefCell<DatabaseHeader>>,
pager: Rc<Pager>,
connection: Weak<Connection>,
) -> Result<Program> {
let mut program = ProgramBuilder::new();
) -> Result<()> {
let init_label = program.allocate_label();
program.emit_insn(Insn::Init {
target_pc: init_label,
@@ -548,17 +539,11 @@ fn translate_pragma(
match body {
None => {
let pragma_name = &name.name.0;
query_pragma(pragma_name, database_header.clone(), &mut program)?;
query_pragma(pragma_name, database_header.clone(), program)?;
}
Some(ast::PragmaBody::Equals(value)) => {
write = true;
update_pragma(
&name.name.0,
value,
database_header.clone(),
pager,
&mut program,
)?;
update_pragma(&name.name.0, value, database_header.clone(), pager, program)?;
}
Some(ast::PragmaBody::Call(_)) => {
todo!()
@@ -574,7 +559,8 @@ fn translate_pragma(
program.emit_insn(Insn::Goto {
target_pc: start_offset,
});
Ok(program.build(database_header, connection))
Ok(())
}
fn update_pragma(

View File

@@ -1,6 +1,7 @@
use super::{
plan::{Aggregate, Plan, SelectQueryType, SourceOperator, TableReference, TableReferenceType},
select::prepare_select_plan,
SymbolTable,
};
use crate::{
function::Func,
@@ -11,6 +12,8 @@ use crate::{
};
use sqlite3_parser::ast::{self, Expr, FromClause, JoinType, Limit};
pub const ROWID: &'static str = "rowid";
pub struct OperatorIdCounter {
id: usize,
}
@@ -101,8 +104,18 @@ pub fn bind_column_references(
if id.0.eq_ignore_ascii_case("true") || id.0.eq_ignore_ascii_case("false") {
return Ok(());
}
let mut match_result = None;
let normalized_id = normalize_ident(id.0.as_str());
if referenced_tables.len() > 0 {
if let Some(row_id_expr) =
parse_row_id(&normalized_id, 0, || referenced_tables.len() != 1)?
{
*expr = row_id_expr;
return Ok(());
}
}
let mut match_result = None;
for (tbl_idx, table) in referenced_tables.iter().enumerate() {
let col_idx = table
.columns()
@@ -139,6 +152,12 @@ pub fn bind_column_references(
}
let tbl_idx = matching_tbl_idx.unwrap();
let normalized_id = normalize_ident(id.0.as_str());
if let Some(row_id_expr) = parse_row_id(&normalized_id, tbl_idx, || false)? {
*expr = row_id_expr;
return Ok(());
}
let col_idx = referenced_tables[tbl_idx]
.columns()
.iter()
@@ -208,7 +227,7 @@ pub fn bind_column_references(
Ok(())
}
// Already bound earlier
ast::Expr::Column { .. } => Ok(()),
ast::Expr::Column { .. } | ast::Expr::RowId { .. } => Ok(()),
ast::Expr::DoublyQualified(_, _, _) => todo!(),
ast::Expr::Exists(_) => todo!(),
ast::Expr::FunctionCallStar { .. } => Ok(()),
@@ -250,7 +269,7 @@ pub fn bind_column_references(
bind_column_references(expr, referenced_tables)?;
Ok(())
}
ast::Expr::Variable(_) => todo!(),
ast::Expr::Variable(_) => Ok(()),
}
}
@@ -259,6 +278,7 @@ fn parse_from_clause_table(
table: ast::SelectTable,
operator_id_counter: &mut OperatorIdCounter,
cur_table_index: usize,
syms: &SymbolTable,
) -> Result<(TableReference, SourceOperator)> {
match table {
ast::SelectTable::Table(qualified_name, maybe_alias, _) => {
@@ -289,7 +309,7 @@ fn parse_from_clause_table(
))
}
ast::SelectTable::Select(subselect, maybe_alias) => {
let Plan::Select(mut subplan) = prepare_select_plan(schema, *subselect)? else {
let Plan::Select(mut subplan) = prepare_select_plan(schema, *subselect, syms)? else {
unreachable!();
};
subplan.query_type = SelectQueryType::Subquery {
@@ -322,6 +342,7 @@ pub fn parse_from(
schema: &Schema,
mut from: Option<FromClause>,
operator_id_counter: &mut OperatorIdCounter,
syms: &SymbolTable,
) -> Result<(SourceOperator, Vec<TableReference>)> {
if from.as_ref().and_then(|f| f.select.as_ref()).is_none() {
return Ok((
@@ -339,7 +360,7 @@ pub fn parse_from(
let select_owned = *std::mem::take(&mut from_owned.select).unwrap();
let joins_owned = std::mem::take(&mut from_owned.joins).unwrap_or_default();
let (table_reference, mut operator) =
parse_from_clause_table(schema, select_owned, operator_id_counter, table_index)?;
parse_from_clause_table(schema, select_owned, operator_id_counter, table_index, syms)?;
tables.push(table_reference);
table_index += 1;
@@ -350,7 +371,14 @@ pub fn parse_from(
is_outer_join: outer,
using,
predicates,
} = parse_join(schema, join, operator_id_counter, &mut tables, table_index)?;
} = parse_join(
schema,
join,
operator_id_counter,
&mut tables,
table_index,
syms,
)?;
operator = SourceOperator::Join {
left: Box::new(operator),
right: Box::new(right),
@@ -394,6 +422,7 @@ fn parse_join(
operator_id_counter: &mut OperatorIdCounter,
tables: &mut Vec<TableReference>,
table_index: usize,
syms: &SymbolTable,
) -> Result<JoinParseResult> {
let ast::JoinedSelectTable {
operator: join_operator,
@@ -402,7 +431,7 @@ fn parse_join(
} = join;
let (table_reference, source_operator) =
parse_from_clause_table(schema, table, operator_id_counter, table_index)?;
parse_from_clause_table(schema, table, operator_id_counter, table_index, syms)?;
tables.push(table_reference);
@@ -571,3 +600,20 @@ pub fn break_predicate_at_and_boundaries(
}
}
}
/// If `column_name` is the special `rowid` column, build the corresponding
/// `ast::Expr::RowId` expression for table `table_id`.
///
/// `fn_check` is an ambiguity predicate evaluated only when the name matches:
/// when it returns `true` (e.g. multiple tables are in scope and the column is
/// unqualified) a "ROWID is ambiguous" parse error is raised. Returns
/// `Ok(None)` when the name is not a rowid reference at all.
fn parse_row_id<F>(column_name: &str, table_id: usize, fn_check: F) -> Result<Option<ast::Expr>>
where
    F: FnOnce() -> bool,
{
    // Not a rowid reference: nothing to do.
    if !column_name.eq_ignore_ascii_case(ROWID) {
        return Ok(None);
    }
    if fn_check() {
        crate::bail_parse_error!("ROWID is ambiguous");
    }
    Ok(Some(ast::Expr::RowId {
        database: None, // TODO: support different databases
        table: table_id,
    }))
}

View File

@@ -1,11 +1,7 @@
use std::rc::Weak;
use std::{cell::RefCell, rc::Rc};
use super::emitter::emit_program;
use super::expr::get_name;
use super::plan::SelectQueryType;
use crate::function::Func;
use crate::storage::sqlite3_ondisk::DatabaseHeader;
use crate::translate::optimizer::optimize_plan;
use crate::translate::plan::{Aggregate, Direction, GroupBy, Plan, ResultSetColumn, SelectPlan};
use crate::translate::planner::{
@@ -13,24 +9,27 @@ use crate::translate::planner::{
parse_where, resolve_aggregates, OperatorIdCounter,
};
use crate::util::normalize_ident;
use crate::{schema::Schema, vdbe::Program, Result};
use crate::{Connection, SymbolTable};
use crate::SymbolTable;
use crate::{schema::Schema, vdbe::builder::ProgramBuilder, Result};
use sqlite3_parser::ast;
use sqlite3_parser::ast::ResultColumn;
pub fn translate_select(
program: &mut ProgramBuilder,
schema: &Schema,
select: ast::Select,
database_header: Rc<RefCell<DatabaseHeader>>,
connection: Weak<Connection>,
syms: &SymbolTable,
) -> Result<Program> {
let mut select_plan = prepare_select_plan(schema, select)?;
) -> Result<()> {
let mut select_plan = prepare_select_plan(schema, select, syms)?;
optimize_plan(&mut select_plan)?;
emit_program(database_header, select_plan, connection, syms)
emit_program(program, select_plan, syms)
}
pub fn prepare_select_plan(schema: &Schema, select: ast::Select) -> Result<Plan> {
pub fn prepare_select_plan(
schema: &Schema,
select: ast::Select,
syms: &SymbolTable,
) -> Result<Plan> {
match *select.body.select {
ast::OneSelect::Select {
mut columns,
@@ -47,7 +46,8 @@ pub fn prepare_select_plan(schema: &Schema, select: ast::Select) -> Result<Plan>
let mut operator_id_counter = OperatorIdCounter::new();
// Parse the FROM clause
let (source, referenced_tables) = parse_from(schema, from, &mut operator_id_counter)?;
let (source, referenced_tables) =
parse_from(schema, from, &mut operator_id_counter, syms)?;
let mut plan = SelectPlan {
source,
@@ -147,7 +147,24 @@ pub fn prepare_select_plan(schema: &Schema, select: ast::Select) -> Result<Plan>
contains_aggregates,
});
}
_ => {}
Err(_) => {
if syms.functions.contains_key(&name.0) {
let contains_aggregates = resolve_aggregates(
expr,
&mut aggregate_expressions,
);
plan.result_columns.push(ResultSetColumn {
name: get_name(
maybe_alias.as_ref(),
expr,
&plan.referenced_tables,
|| format!("expr_{}", result_column_idx),
),
expr: expr.clone(),
contains_aggregates,
});
}
}
}
}
ast::Expr::FunctionCallStar {
@@ -185,7 +202,7 @@ pub fn prepare_select_plan(schema: &Schema, select: ast::Select) -> Result<Plan>
}
expr => {
let contains_aggregates =
resolve_aggregates(&expr, &mut aggregate_expressions);
resolve_aggregates(expr, &mut aggregate_expressions);
plan.result_columns.push(ResultSetColumn {
name: get_name(
maybe_alias.as_ref(),

View File

@@ -1,11 +1,10 @@
use crate::error::LimboError;
use crate::ext::{ExtValue, ExtValueType};
use crate::storage::sqlite3_ondisk::write_varint;
use crate::Result;
use std::fmt::Display;
use std::rc::Rc;
use crate::error::LimboError;
use crate::Result;
use crate::storage::sqlite3_ondisk::write_varint;
#[derive(Debug, Clone, PartialEq)]
pub enum Value<'a> {
Null,
@@ -94,6 +93,50 @@ impl Display for OwnedValue {
}
}
impl OwnedValue {
    /// Marshal this value into the extension FFI representation (`ExtValue`).
    ///
    /// Aggregate and record values have no FFI form yet and are rejected with
    /// `todo!`.
    pub fn to_ffi(&self) -> ExtValue {
        match self {
            Self::Null => ExtValue::null(),
            Self::Integer(i) => ExtValue::from_integer(*i),
            Self::Float(fl) => ExtValue::from_float(*fl),
            Self::Text(text) => ExtValue::from_text(text.value.to_string()),
            Self::Blob(blob) => ExtValue::from_blob(blob.to_vec()),
            Self::Agg(_) => todo!("Aggregate values not yet supported"),
            Self::Record(_) => todo!("Record values not yet supported"),
        }
    }

    /// Convert an FFI value back into an owned value.
    ///
    /// If the FFI value claims a type but its payload accessor yields nothing,
    /// the result degrades to `OwnedValue::Null`.
    pub fn from_ffi(v: &ExtValue) -> Self {
        match v.value_type() {
            ExtValueType::Null => Self::Null,
            ExtValueType::Integer => v.to_integer().map_or(Self::Null, Self::Integer),
            ExtValueType::Float => v.to_float().map_or(Self::Null, Self::Float),
            ExtValueType::Text => v
                .to_text()
                .map_or(Self::Null, |t| Self::build_text(std::rc::Rc::new(t))),
            ExtValueType::Blob => v
                .to_blob()
                .map_or(Self::Null, |b| Self::Blob(std::rc::Rc::new(b))),
        }
    }
}
#[derive(Debug, Clone, PartialEq)]
pub enum AggContext {
Avg(OwnedValue, OwnedValue), // acc and count
@@ -293,6 +336,18 @@ impl std::ops::DivAssign<OwnedValue> for OwnedValue {
}
}
impl From<Value<'_>> for OwnedValue {
    /// Convert a borrowed `Value` into an owned `OwnedValue`, copying any
    /// text or blob payload onto the heap.
    fn from(value: Value<'_>) -> Self {
        match value {
            Value::Null => OwnedValue::Null,
            Value::Integer(i) => OwnedValue::Integer(i),
            Value::Float(f) => OwnedValue::Float(f),
            // Use the `build_text` constructor for consistency with the rest
            // of this file (see `OwnedValue::from_ffi`) rather than
            // constructing `LimboText` by hand.
            Value::Text(s) => OwnedValue::build_text(Rc::new(s.to_owned())),
            Value::Blob(b) => OwnedValue::Blob(Rc::new(b.to_owned())),
        }
    }
}
pub fn to_value(value: &OwnedValue) -> Value<'_> {
match value {
OwnedValue::Null => Value::Null,

View File

@@ -5,13 +5,13 @@ use std::{
};
use crate::{
parameters::Parameters,
schema::{BTreeTable, Index, PseudoTable},
storage::sqlite3_ondisk::DatabaseHeader,
Connection,
};
use super::{BranchOffset, CursorID, Insn, InsnReference, Program};
#[allow(dead_code)]
pub struct ProgramBuilder {
next_free_register: usize,
@@ -29,6 +29,7 @@ pub struct ProgramBuilder {
seekrowid_emitted_bitmask: u64,
// map of instruction index to manual comment (used in EXPLAIN)
comments: HashMap<InsnReference, &'static str>,
pub parameters: Parameters,
}
#[derive(Debug, Clone)]
@@ -58,6 +59,7 @@ impl ProgramBuilder {
label_to_resolved_offset: HashMap::new(),
seekrowid_emitted_bitmask: 0,
comments: HashMap::new(),
parameters: Parameters::new(),
}
}
@@ -331,6 +333,7 @@ impl ProgramBuilder {
self.constant_insns.is_empty(),
"constant_insns is not empty when build() is called, did you forget to call emit_constant_insns()?"
);
self.parameters.list.dedup();
Program {
max_registers: self.next_free_register,
insns: self.insns,
@@ -339,6 +342,7 @@ impl ProgramBuilder {
comments: self.comments,
connection,
auto_commit: true,
parameters: self.parameters,
}
}
}

View File

@@ -615,9 +615,8 @@ fn parse_modifier(modifier: &str) -> Result<Modifier> {
#[cfg(test)]
mod tests {
use std::rc::Rc;
use super::*;
use std::rc::Rc;
#[test]
fn test_valid_get_date_from_time_value() {
@@ -1399,7 +1398,6 @@ mod tests {
OwnedValue::build_text(Rc::new(value.to_string()))
}
// Basic helper to format NaiveDateTime for comparison
fn format(dt: NaiveDateTime) -> String {
dt.format("%Y-%m-%d %H:%M:%S").to_string()
}
@@ -1409,18 +1407,25 @@ mod tests {
#[test]
fn test_single_modifier() {
let now = Utc::now().naive_utc();
let expected = format(now - TimeDelta::days(1));
let result = exec_datetime(&[text("now"), text("-1 day")], DateTimeOutput::DateTime);
let time = setup_datetime();
let expected = format(time - TimeDelta::days(1));
let result = exec_datetime(
&[text("2023-06-15 12:30:45"), text("-1 day")],
DateTimeOutput::DateTime,
);
assert_eq!(result, text(&expected));
}
#[test]
fn test_multiple_modifiers() {
let now = Utc::now().naive_utc();
let expected = format(now - TimeDelta::days(1) + TimeDelta::hours(3));
let time = setup_datetime();
let expected = format(time - TimeDelta::days(1) + TimeDelta::hours(3));
let result = exec_datetime(
&[text("now"), text("-1 day"), text("+3 hours")],
&[
text("2023-06-15 12:30:45"),
text("-1 day"),
text("+3 hours"),
],
DateTimeOutput::DateTime,
);
assert_eq!(result, text(&expected));
@@ -1428,26 +1433,27 @@ mod tests {
#[test]
fn test_subsec_modifier() {
let now = Utc::now().naive_utc().time();
let result = exec_datetime(&[text("now"), text("subsec")], DateTimeOutput::Time);
let tolerance = TimeDelta::milliseconds(1);
let time = setup_datetime();
let result = exec_datetime(
&[text("2023-06-15 12:30:45"), text("subsec")],
DateTimeOutput::Time,
);
let result =
chrono::NaiveTime::parse_from_str(&result.to_string(), "%H:%M:%S%.3f").unwrap();
assert!(
(now - result).num_milliseconds().abs() <= tolerance.num_milliseconds(),
"Expected: {}, Actual: {}",
now,
result
);
assert_eq!(time.time(), result);
}
#[test]
fn test_start_of_day_modifier() {
let now = Utc::now().naive_utc();
let start_of_day = now.date().and_hms_opt(0, 0, 0).unwrap();
let time = setup_datetime();
let start_of_day = time.date().and_hms_opt(0, 0, 0).unwrap();
let expected = format(start_of_day - TimeDelta::days(1));
let result = exec_datetime(
&[text("now"), text("start of day"), text("-1 day")],
&[
text("2023-06-15 12:30:45"),
text("start of day"),
text("-1 day"),
],
DateTimeOutput::DateTime,
);
assert_eq!(result, text(&expected));
@@ -1455,14 +1461,18 @@ mod tests {
#[test]
fn test_start_of_month_modifier() {
let now = Utc::now().naive_utc();
let start_of_month = NaiveDate::from_ymd_opt(now.year(), now.month(), 1)
let time = setup_datetime();
let start_of_month = NaiveDate::from_ymd_opt(time.year(), time.month(), 1)
.unwrap()
.and_hms_opt(0, 0, 0)
.unwrap();
let expected = format(start_of_month + TimeDelta::days(1));
let result = exec_datetime(
&[text("now"), text("start of month"), text("+1 day")],
&[
text("2023-06-15 12:30:45"),
text("start of month"),
text("+1 day"),
],
DateTimeOutput::DateTime,
);
assert_eq!(result, text(&expected));
@@ -1470,15 +1480,15 @@ mod tests {
#[test]
fn test_start_of_year_modifier() {
let now = Utc::now().naive_utc();
let start_of_year = NaiveDate::from_ymd_opt(now.year(), 1, 1)
let time = setup_datetime();
let start_of_year = NaiveDate::from_ymd_opt(time.year(), 1, 1)
.unwrap()
.and_hms_opt(0, 0, 0)
.unwrap();
let expected = format(start_of_year + TimeDelta::days(30) + TimeDelta::hours(5));
let result = exec_datetime(
&[
text("now"),
text("2023-06-15 12:30:45"),
text("start of year"),
text("+30 days"),
text("+5 hours"),
@@ -1488,33 +1498,36 @@ mod tests {
assert_eq!(result, text(&expected));
}
/// Test 'localtime' and 'utc' modifiers
#[test]
fn test_localtime_and_utc_modifiers() {
let local = chrono::Local::now().naive_local();
let expected = format(local);
let result = exec_datetime(&[text("now"), text("localtime")], DateTimeOutput::DateTime);
assert_eq!(result, text(&expected));
let utc = Utc::now().naive_utc();
let expected_utc = format(utc);
let result_utc = exec_datetime(
&[text(&local.to_string()), text("utc")],
fn test_timezone_modifiers() {
let dt = setup_datetime();
let result_local = exec_datetime(
&[text("2023-06-15 12:30:45"), text("localtime")],
DateTimeOutput::DateTime,
);
assert_eq!(result_utc, text(&expected_utc));
assert_eq!(
result_local,
text(
&dt.and_utc()
.with_timezone(&chrono::Local)
.format("%Y-%m-%d %H:%M:%S")
.to_string()
)
);
// TODO: utc modifier assumes time given is not already utc
// add test when fixed in the future
}
#[test]
fn test_combined_modifiers() {
let now = Utc::now().naive_utc();
let expected = now - TimeDelta::days(1)
let time = create_datetime(2000, 1, 1, 0, 0, 0);
let expected = time - TimeDelta::days(1)
+ TimeDelta::hours(5)
+ TimeDelta::minutes(30)
+ TimeDelta::seconds(15);
let result = exec_datetime(
&[
text("now"),
text("2000-01-01 00:00:00"),
text("-1 day"),
text("+5 hours"),
text("+30 minutes"),
@@ -1523,16 +1536,10 @@ mod tests {
],
DateTimeOutput::DateTime,
);
let tolerance = TimeDelta::milliseconds(1);
let result =
chrono::NaiveDateTime::parse_from_str(&result.to_string(), "%Y-%m-%d %H:%M:%S%.3f")
.unwrap();
assert!(
(result - expected).num_milliseconds().abs() <= tolerance.num_milliseconds(),
"Expected: {}, Actual: {}",
expected,
result
);
assert_eq!(expected, result);
}
#[test]

View File

@@ -1044,6 +1044,33 @@ pub fn insn_to_str(
0,
"".to_string(),
),
Insn::ShiftRight { lhs, rhs, dest } => (
"ShiftRight",
*rhs as i32,
*lhs as i32,
*dest as i32,
OwnedValue::build_text(Rc::new("".to_string())),
0,
format!("r[{}]=r[{}] >> r[{}]", dest, lhs, rhs),
),
Insn::ShiftLeft { lhs, rhs, dest } => (
"ShiftLeft",
*rhs as i32,
*lhs as i32,
*dest as i32,
OwnedValue::build_text(Rc::new("".to_string())),
0,
format!("r[{}]=r[{}] << r[{}]", dest, lhs, rhs),
),
Insn::Variable { index, dest } => (
"Variable",
usize::from(*index) as i32,
*dest as i32,
0,
OwnedValue::build_text(Rc::new("".to_string())),
0,
format!("r[{}]=parameter({})", *dest, *index),
),
};
format!(
"{:<4} {:<17} {:<4} {:<4} {:<4} {:<13} {:<2} {}",

View File

@@ -1,3 +1,5 @@
use std::num::NonZero;
use super::{AggFunc, BranchOffset, CursorID, FuncCtx, PageIdx};
use crate::types::{OwnedRecord, OwnedValue};
use limbo_macros::Description;
@@ -487,6 +489,26 @@ pub enum Insn {
db: usize,
where_clause: String,
},
// Place the result of lhs >> rhs in dest register.
ShiftRight {
lhs: usize,
rhs: usize,
dest: usize,
},
// Place the result of lhs << rhs in dest register.
ShiftLeft {
lhs: usize,
rhs: usize,
dest: usize,
},
/// Get parameter variable.
Variable {
index: NonZero<usize>,
dest: usize,
},
}
fn cast_text_to_numerical(value: &str) -> OwnedValue {
@@ -720,3 +742,99 @@ pub fn exec_bit_not(mut reg: &OwnedValue) -> OwnedValue {
_ => todo!(),
}
}
/// Evaluate the SQL `<<` operator on two register values.
///
/// Aggregates contribute their finalized value, NULL propagates, text is
/// coerced to a number first, and floats are truncated to integers before
/// the shift is delegated to `compute_shl`.
pub fn exec_shift_left(mut lhs: &OwnedValue, mut rhs: &OwnedValue) -> OwnedValue {
    if let OwnedValue::Agg(agg) = lhs {
        lhs = agg.final_value();
    }
    if let OwnedValue::Agg(agg) = rhs {
        rhs = agg.final_value();
    }
    // Truncate numeric operands toward zero; anything else is unsupported.
    let as_int = |v: &OwnedValue| -> Option<i64> {
        match v {
            OwnedValue::Integer(i) => Some(*i),
            OwnedValue::Float(f) => Some(*f as i64),
            _ => None,
        }
    };
    match (lhs, rhs) {
        // NULL on either side propagates.
        (OwnedValue::Null, _) | (_, OwnedValue::Null) => OwnedValue::Null,
        // Text operands are coerced to numeric values, then retried.
        (OwnedValue::Text(l), OwnedValue::Text(r)) => exec_shift_left(
            &cast_text_to_numerical(&l.value),
            &cast_text_to_numerical(&r.value),
        ),
        (OwnedValue::Text(l), other) => exec_shift_left(&cast_text_to_numerical(&l.value), other),
        (other, OwnedValue::Text(r)) => exec_shift_left(other, &cast_text_to_numerical(&r.value)),
        (l, r) => {
            let (Some(a), Some(b)) = (as_int(l), as_int(r)) else {
                todo!()
            };
            OwnedValue::Integer(compute_shl(a, b))
        }
    }
}
/// Shift `lhs` left by `rhs` bits, mirroring SQLite's `<<` semantics:
/// a negative count shifts in the opposite direction, and counts of 64 or
/// more saturate — to 0 when shifting left or right-shifting a non-negative
/// value, and to -1 when arithmetically right-shifting a negative value
/// (SQLite: `-1 << -100` is `-1`, `1 << 100` is `0`).
fn compute_shl(lhs: i64, rhs: i64) -> i64 {
    if rhs == 0 {
        lhs
    } else if rhs >= 64 {
        // Every bit is shifted out to the left.
        0
    } else if rhs <= -64 {
        // Arithmetic right shift by >= 64 bits saturates to the sign bit.
        if lhs < 0 {
            -1
        } else {
            0
        }
    } else if rhs < 0 {
        // Negative count: shift right instead (arithmetic, as in SQLite).
        // `rhs` is in (-64, 0) here, so negation cannot overflow.
        lhs >> (-rhs)
    } else {
        lhs << rhs
    }
}
/// Evaluate the SQL `>>` operator on two register values.
///
/// Aggregates contribute their finalized value, NULL propagates, text is
/// coerced to a number first, and floats are truncated to integers before
/// the shift is delegated to `compute_shr`.
pub fn exec_shift_right(mut lhs: &OwnedValue, mut rhs: &OwnedValue) -> OwnedValue {
    if let OwnedValue::Agg(agg) = lhs {
        lhs = agg.final_value();
    }
    if let OwnedValue::Agg(agg) = rhs {
        rhs = agg.final_value();
    }
    // Truncate numeric operands toward zero; anything else is unsupported.
    let as_int = |v: &OwnedValue| -> Option<i64> {
        match v {
            OwnedValue::Integer(i) => Some(*i),
            OwnedValue::Float(f) => Some(*f as i64),
            _ => None,
        }
    };
    match (lhs, rhs) {
        // NULL on either side propagates.
        (OwnedValue::Null, _) | (_, OwnedValue::Null) => OwnedValue::Null,
        // Text operands are coerced to numeric values, then retried.
        (OwnedValue::Text(l), OwnedValue::Text(r)) => exec_shift_right(
            &cast_text_to_numerical(&l.value),
            &cast_text_to_numerical(&r.value),
        ),
        (OwnedValue::Text(l), other) => exec_shift_right(&cast_text_to_numerical(&l.value), other),
        (other, OwnedValue::Text(r)) => exec_shift_right(other, &cast_text_to_numerical(&r.value)),
        (l, r) => {
            let (Some(a), Some(b)) = (as_int(l), as_int(r)) else {
                todo!()
            };
            OwnedValue::Integer(compute_shr(a, b))
        }
    }
}
/// Shift `lhs` right by `rhs` bits, mirroring SQLite's `>>` semantics:
/// the shift is arithmetic (sign-extending), a negative count shifts left
/// instead, and counts of 64 or more saturate — to -1 for a negative `lhs`
/// (SQLite: `-1 >> 100` is `-1`), otherwise to 0.
fn compute_shr(lhs: i64, rhs: i64) -> i64 {
    if rhs == 0 {
        lhs
    } else if rhs >= 64 {
        // Arithmetic right shift by >= 64 bits saturates to the sign bit.
        if lhs < 0 {
            -1
        } else {
            0
        }
    } else if rhs <= -64 {
        // Equivalent to a left shift by >= 64 bits: every bit shifted out.
        0
    } else if rhs < 0 {
        // Negative count: shift left instead (as in SQLite).
        // `rhs` is in (-64, 0) here, so negation cannot overflow.
        lhs << (-rhs)
    } else {
        lhs >> rhs
    }
}

View File

@@ -25,8 +25,7 @@ pub mod likeop;
pub mod sorter;
use crate::error::{LimboError, SQLITE_CONSTRAINT_PRIMARYKEY};
#[cfg(feature = "uuid")]
use crate::ext::{exec_ts_from_uuid7, exec_uuid, exec_uuidblob, exec_uuidstr, ExtFunc, UuidFunc};
use crate::ext::ExtValue;
use crate::function::{AggFunc, FuncCtx, MathFunc, MathFuncArity, ScalarFunc};
use crate::pseudo::PseudoCursor;
use crate::result::LimboResult;
@@ -42,20 +41,21 @@ use crate::{
json::json_arrow_extract, json::json_arrow_shift_extract, json::json_error_position,
json::json_extract, json::json_object, json::json_type,
};
use crate::{Connection, Result, Rows, TransactionState, DATABASE_VERSION};
use crate::{resolve_ext_path, Connection, Result, Rows, TransactionState, DATABASE_VERSION};
use datetime::{exec_date, exec_datetime_full, exec_julianday, exec_time, exec_unixepoch};
use insn::{
exec_add, exec_bit_and, exec_bit_not, exec_bit_or, exec_divide, exec_multiply, exec_remainder,
exec_subtract,
exec_shift_left, exec_shift_right, exec_subtract,
};
use likeop::{construct_like_escape_arg, exec_glob, exec_like_with_escape};
use rand::distributions::{Distribution, Uniform};
use rand::{thread_rng, Rng};
use regex::{Regex, RegexBuilder};
use sorter::Sorter;
use std::borrow::{Borrow, BorrowMut};
use std::borrow::BorrowMut;
use std::cell::RefCell;
use std::collections::{BTreeMap, HashMap};
use std::num::NonZero;
use std::rc::{Rc, Weak};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -147,6 +147,33 @@ macro_rules! return_if_io {
};
}
// Invoke an extension-provided scalar function over VDBE registers and store
// the result. `$func_ptr` is the C-ABI function pointer, `$arg_count`
// arguments are read starting at register `$start_reg`, and the converted
// return value is written to register `$dest_register`.
macro_rules! call_external_function {
    (
        $func_ptr:expr,
        $dest_register:expr,
        $state:expr,
        $arg_count:expr,
        $start_reg:expr
    ) => {{
        if $arg_count == 0 {
            // Zero-argument call: argc == 0 with a null argv pointer.
            let result_c_value: ExtValue = ($func_ptr)(0, std::ptr::null());
            let result_ov = OwnedValue::from_ffi(&result_c_value);
            $state.registers[$dest_register] = result_ov;
        } else {
            // Convert each argument register into its FFI representation.
            let register_slice = &$state.registers[$start_reg..$start_reg + $arg_count];
            let mut ext_values: Vec<ExtValue> = Vec::with_capacity($arg_count);
            for ov in register_slice.iter() {
                let val = ov.to_ffi();
                ext_values.push(val);
            }
            // `ext_values` stays alive until after the call, keeping the raw
            // argv pointer valid for the callee.
            let argv_ptr = ext_values.as_ptr();
            let result_c_value: ExtValue = ($func_ptr)($arg_count as i32, argv_ptr);
            let result_ov = OwnedValue::from_ffi(&result_c_value);
            $state.registers[$dest_register] = result_ov;
        }
    }};
}
struct RegexCache {
like: HashMap<String, Regex>,
glob: HashMap<String, Regex>,
@@ -174,6 +201,7 @@ pub struct ProgramState {
ended_coroutine: HashMap<usize, bool>, // flag to indicate that a coroutine has ended (key is the yield register)
regex_cache: RegexCache,
interrupted: bool,
parameters: HashMap<NonZero<usize>, OwnedValue>,
}
impl ProgramState {
@@ -196,6 +224,7 @@ impl ProgramState {
ended_coroutine: HashMap::new(),
regex_cache: RegexCache::new(),
interrupted: false,
parameters: HashMap::new(),
}
}
@@ -214,6 +243,18 @@ impl ProgramState {
pub fn is_interrupted(&self) -> bool {
self.interrupted
}
/// Bind `value` to the SQL parameter at 1-based `index` (e.g. `?1`),
/// replacing any previous binding at that index.
pub fn bind_at(&mut self, index: NonZero<usize>, value: OwnedValue) {
    self.parameters.insert(index, value);
}
/// Look up the value bound at parameter `index`, if any.
pub fn get_parameter(&self, index: NonZero<usize>) -> Option<&OwnedValue> {
    self.parameters.get(&index)
}
/// Clear all bound parameter values so the statement can be re-executed
/// with fresh bindings.
pub fn reset(&mut self) {
    self.parameters.clear();
}
}
macro_rules! must_be_btree_cursor {
@@ -236,6 +277,7 @@ pub struct Program {
pub cursor_ref: Vec<(Option<String>, CursorType)>,
pub database_header: Rc<RefCell<DatabaseHeader>>,
pub comments: HashMap<InsnReference, &'static str>,
pub parameters: crate::parameters::Parameters,
pub connection: Weak<Connection>,
pub auto_commit: bool,
}
@@ -1459,99 +1501,91 @@ impl Program {
let arg_count = func.arg_count;
match &func.func {
#[cfg(feature = "json")]
crate::function::Func::Json(JsonFunc::Json) => {
let json_value = &state.registers[*start_reg];
let json_str = get_json(json_value);
match json_str {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
}
#[cfg(feature = "json")]
crate::function::Func::Json(
func @ (JsonFunc::JsonArray | JsonFunc::JsonObject),
) => {
let reg_values = &state.registers[*start_reg..*start_reg + arg_count];
let func = match func {
JsonFunc::JsonArray => json_array,
JsonFunc::JsonObject => json_object,
_ => unreachable!(),
};
let json_result = func(reg_values);
match json_result {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
}
#[cfg(feature = "json")]
crate::function::Func::Json(JsonFunc::JsonExtract) => {
let result = match arg_count {
0 => json_extract(&OwnedValue::Null, &[]),
_ => {
let val = &state.registers[*start_reg];
let reg_values =
&state.registers[*start_reg + 1..*start_reg + arg_count];
json_extract(val, reg_values)
crate::function::Func::Json(json_func) => match json_func {
JsonFunc::Json => {
let json_value = &state.registers[*start_reg];
let json_str = get_json(json_value);
match json_str {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
};
}
JsonFunc::JsonArray | JsonFunc::JsonObject => {
let reg_values =
&state.registers[*start_reg..*start_reg + arg_count];
match result {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
}
#[cfg(feature = "json")]
crate::function::Func::Json(
func @ (JsonFunc::JsonArrowExtract | JsonFunc::JsonArrowShiftExtract),
) => {
assert_eq!(arg_count, 2);
let json = &state.registers[*start_reg];
let path = &state.registers[*start_reg + 1];
let func = match func {
JsonFunc::JsonArrowExtract => json_arrow_extract,
JsonFunc::JsonArrowShiftExtract => json_arrow_shift_extract,
_ => unreachable!(),
};
let json_str = func(json, path);
match json_str {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
}
#[cfg(feature = "json")]
crate::function::Func::Json(
func @ (JsonFunc::JsonArrayLength | JsonFunc::JsonType),
) => {
let json_value = &state.registers[*start_reg];
let path_value = if arg_count > 1 {
Some(&state.registers[*start_reg + 1])
} else {
None
};
let func_result = match func {
JsonFunc::JsonArrayLength => {
json_array_length(json_value, path_value)
let func = match func {
JsonFunc::JsonArray => json_array,
JsonFunc::JsonObject => json_object,
_ => unreachable!(),
};
let json_result = func(reg_values);
match json_result {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
JsonFunc::JsonType => json_type(json_value, path_value),
_ => unreachable!(),
};
}
JsonFunc::JsonExtract => {
let result = match arg_count {
0 => json_extract(&OwnedValue::Null, &[]),
_ => {
let val = &state.registers[*start_reg];
let reg_values = &state.registers
[*start_reg + 1..*start_reg + arg_count];
match func_result {
Ok(result) => state.registers[*dest] = result,
Err(e) => return Err(e),
json_extract(val, reg_values)
}
};
match result {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
}
}
#[cfg(feature = "json")]
crate::function::Func::Json(JsonFunc::JsonErrorPosition) => {
let json_value = &state.registers[*start_reg];
match json_error_position(json_value) {
Ok(pos) => state.registers[*dest] = pos,
Err(e) => return Err(e),
JsonFunc::JsonArrowExtract | JsonFunc::JsonArrowShiftExtract => {
assert_eq!(arg_count, 2);
let json = &state.registers[*start_reg];
let path = &state.registers[*start_reg + 1];
let json_func = match json_func {
JsonFunc::JsonArrowExtract => json_arrow_extract,
JsonFunc::JsonArrowShiftExtract => json_arrow_shift_extract,
_ => unreachable!(),
};
let json_str = json_func(json, path);
match json_str {
Ok(json) => state.registers[*dest] = json,
Err(e) => return Err(e),
}
}
}
JsonFunc::JsonArrayLength | JsonFunc::JsonType => {
let json_value = &state.registers[*start_reg];
let path_value = if arg_count > 1 {
Some(&state.registers[*start_reg + 1])
} else {
None
};
let func_result = match json_func {
JsonFunc::JsonArrayLength => {
json_array_length(json_value, path_value)
}
JsonFunc::JsonType => json_type(json_value, path_value),
_ => unreachable!(),
};
match func_result {
Ok(result) => state.registers[*dest] = result,
Err(e) => return Err(e),
}
}
JsonFunc::JsonErrorPosition => {
let json_value = &state.registers[*start_reg];
match json_error_position(json_value) {
Ok(pos) => state.registers[*dest] = pos,
Err(e) => return Err(e),
}
}
},
crate::function::Func::Scalar(scalar_func) => match scalar_func {
ScalarFunc::Cast => {
assert!(arg_count == 2);
@@ -1850,43 +1884,17 @@ impl Program {
let replacement = &state.registers[*start_reg + 2];
state.registers[*dest] = exec_replace(source, pattern, replacement);
}
},
#[allow(unreachable_patterns)]
crate::function::Func::Extension(extfn) => match extfn {
#[cfg(feature = "uuid")]
ExtFunc::Uuid(uuidfn) => match uuidfn {
UuidFunc::Uuid4Str => {
state.registers[*dest] = exec_uuid(uuidfn, None)?
#[cfg(not(target_family = "wasm"))]
ScalarFunc::LoadExtension => {
let extension = &state.registers[*start_reg];
let ext = resolve_ext_path(&extension.to_string())?;
if let Some(conn) = self.connection.upgrade() {
conn.load_extension(ext)?;
}
UuidFunc::Uuid7 => match arg_count {
0 => {
state.registers[*dest] =
exec_uuid(uuidfn, None).unwrap_or(OwnedValue::Null);
}
1 => {
let reg_value = state.registers[*start_reg].borrow();
state.registers[*dest] = exec_uuid(uuidfn, Some(reg_value))
.unwrap_or(OwnedValue::Null);
}
_ => unreachable!(),
},
_ => {
// remaining accept 1 arg
let reg_value = state.registers[*start_reg].borrow();
state.registers[*dest] = match uuidfn {
UuidFunc::Uuid7TS => Some(exec_ts_from_uuid7(reg_value)),
UuidFunc::UuidStr => exec_uuidstr(reg_value).ok(),
UuidFunc::UuidBlob => exec_uuidblob(reg_value).ok(),
_ => unreachable!(),
}
.unwrap_or(OwnedValue::Null);
}
},
_ => unreachable!(), // when more extension types are added
}
},
crate::function::Func::External(f) => {
let result = (f.func)(&[])?;
state.registers[*dest] = result;
call_external_function! {f.func, *dest, state, arg_count, *start_reg };
}
crate::function::Func::Math(math_func) => match math_func.arity() {
MathFuncArity::Nullary => match math_func {
@@ -2051,11 +2059,25 @@ impl Program {
state.pc += 1;
}
Insn::MustBeInt { reg } => {
match state.registers[*reg] {
match &state.registers[*reg] {
OwnedValue::Integer(_) => {}
OwnedValue::Float(f) => match cast_real_to_integer(*f) {
Ok(i) => state.registers[*reg] = OwnedValue::Integer(i),
Err(_) => crate::bail_parse_error!(
"MustBeInt: the value in register cannot be cast to integer"
),
},
OwnedValue::Text(text) => match checked_cast_text_to_numeric(&text.value) {
Ok(OwnedValue::Integer(i)) => {
state.registers[*reg] = OwnedValue::Integer(i)
}
_ => crate::bail_parse_error!(
"MustBeInt: the value in register cannot be cast to integer"
),
},
_ => {
crate::bail_parse_error!(
"MustBeInt: the value in the register is not an integer"
"MustBeInt: the value in register cannot be cast to integer"
);
}
};
@@ -2171,6 +2193,23 @@ impl Program {
parse_schema_rows(Some(rows), &mut schema, conn.pager.io.clone())?;
state.pc += 1;
}
Insn::ShiftRight { lhs, rhs, dest } => {
state.registers[*dest] =
exec_shift_right(&state.registers[*lhs], &state.registers[*rhs]);
state.pc += 1;
}
Insn::ShiftLeft { lhs, rhs, dest } => {
state.registers[*dest] =
exec_shift_left(&state.registers[*lhs], &state.registers[*rhs]);
state.pc += 1;
}
Insn::Variable { index, dest } => {
state.registers[*dest] = state
.get_parameter(*index)
.ok_or(LimboError::Unbound(*index))?
.clone();
state.pc += 1;
}
}
}
}
@@ -3082,23 +3121,38 @@ fn cast_text_to_real(text: &str) -> OwnedValue {
/// IEEE 754 64-bit float and thus provides a 1-bit of margin for the text-to-float conversion operation.)
/// Any text input that describes a value outside the range of a 64-bit signed integer yields a REAL result.
/// Casting a REAL or INTEGER value to NUMERIC is a no-op, even if a real value could be losslessly converted to an integer.
fn cast_text_to_numeric(text: &str) -> OwnedValue {
fn checked_cast_text_to_numeric(text: &str) -> std::result::Result<OwnedValue, ()> {
if !text.contains('.') && !text.contains('e') && !text.contains('E') {
// Looks like an integer
if let Ok(i) = text.parse::<i64>() {
return OwnedValue::Integer(i);
return Ok(OwnedValue::Integer(i));
}
}
// Try as float
if let Ok(f) = text.parse::<f64>() {
// Check if can be losslessly converted to 51-bit integer
let i = f as i64;
if f == i as f64 && i.abs() < (1i64 << 51) {
return OwnedValue::Integer(i);
}
return OwnedValue::Float(f);
return match cast_real_to_integer(f) {
Ok(i) => Ok(OwnedValue::Integer(i)),
Err(_) => Ok(OwnedValue::Float(f)),
};
}
OwnedValue::Integer(0)
Err(())
}
/// Casts text to NUMERIC, defaulting to `Integer(0)` when the text is not
/// numeric at all (SQLite's CAST fallback behavior).
fn cast_text_to_numeric(text: &str) -> OwnedValue {
    checked_cast_text_to_numeric(text).unwrap_or(OwnedValue::Integer(0))
}
/// Attempts a lossless conversion of `float` to an integer.
///
/// Succeeds only when the value round-trips exactly through `i64` and its
/// magnitude fits in 51 bits — one bit under the 52-bit mantissa of an
/// IEEE 754 double, the margin used for text-to-float conversions.
fn cast_real_to_integer(float: f64) -> std::result::Result<i64, ()> {
    // `as` casts from f64 saturate, so `float <= -2^63` yields i64::MIN.
    let i = float as i64;
    // Use `unsigned_abs()` rather than `abs()`: `i64::MIN.abs()` overflows
    // (panicking in debug builds), and i64::MIN is reachable here because
    // -2^63 round-trips exactly through f64.
    if float == i as f64 && i.unsigned_abs() < (1u64 << 51) {
        return Ok(i);
    }
    Err(())
}
fn execute_sqlite_version(version_integer: i64) -> String {

View File

@@ -0,0 +1,16 @@
[package]
name = "limbo_uuid"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
[lib]
crate-type = ["cdylib", "lib"]
[dependencies]
limbo_extension = { path = "../../limbo_extension"}
uuid = { version = "1.11.0", features = ["v4", "v7"] }
log = "0.4.20"

151
extensions/uuid/src/lib.rs Normal file
View File

@@ -0,0 +1,151 @@
use limbo_extension::{
export_scalar, register_extension, register_scalar_functions, Value, ValueType,
};
// Extension entry point: registers each scalar UUID function with the host
// under its SQL-visible name. `gen_random_uuid` is an extra alias for
// `uuid4_str` (the Postgres-compatible spelling).
register_extension! {
    scalars: {
        "uuid4_str" => uuid4_str,
        "uuid4" => uuid4_blob,
        "uuid7_str" => uuid7_str,
        "uuid7" => uuid7_blob,
        "uuid_str" => uuid_str,
        "uuid_blob" => uuid_blob,
        "uuid7_timestamp_ms" => exec_ts_from_uuid7,
        "gen_random_uuid" => uuid4_str,
    },
}
/// `uuid4_str()` — returns a freshly generated random (version 4) UUID in
/// canonical text form.
#[export_scalar]
#[args(0)]
fn uuid4_str(_args: &[Value]) -> Value {
    Value::from_text(uuid::Uuid::new_v4().to_string())
}
/// `uuid4()` — returns a freshly generated random (version 4) UUID as a
/// 16-byte blob.
#[export_scalar]
#[args(0)]
fn uuid4_blob(_args: &[Value]) -> Value {
    Value::from_blob(uuid::Uuid::new_v4().as_bytes().to_vec())
}
/// `uuid7_str(ts?)` — generates a version 7 (time-ordered) UUID as text.
///
/// With no argument the current time is used. An INTEGER argument is taken
/// as unix seconds; a TEXT argument is parsed as unix seconds. Any other
/// type, an unparsable string, or an out-of-range timestamp yields NULL.
#[export_scalar]
#[args(0..=1)]
fn uuid7_str(args: &[Value]) -> Value {
    let timestamp = if args.is_empty() {
        uuid::Timestamp::now(uuid::ContextV7::new())
    } else {
        let arg = &args[0];
        match arg.value_type() {
            ValueType::Integer => {
                let Some(int) = arg.to_integer() else {
                    return Value::null();
                };
                // Reject negative seconds: `int as u64` would wrap them into
                // huge bogus timestamps (the TEXT path below already guards
                // against this).
                if int < 0 {
                    return Value::null();
                }
                uuid::Timestamp::from_unix(uuid::ContextV7::new(), int as u64, 0)
            }
            ValueType::Text => {
                let Some(text) = arg.to_text() else {
                    return Value::null();
                };
                match text.parse::<i64>() {
                    // NOTE(review): the TEXT path also rejects 0 while the
                    // INTEGER path accepts it — confirm whether that
                    // asymmetry is intended.
                    Ok(unix) if unix > 0 => {
                        uuid::Timestamp::from_unix(uuid::ContextV7::new(), unix as u64, 0)
                    }
                    _ => return Value::null(),
                }
            }
            _ => return Value::null(),
        }
    };
    Value::from_text(uuid::Uuid::new_v7(timestamp).to_string())
}
/// `uuid7(ts?)` — generates a version 7 (time-ordered) UUID as a 16-byte
/// blob.
///
/// With no argument the current time is used. Only an INTEGER argument
/// (unix seconds) is accepted; anything else yields NULL.
#[export_scalar]
#[args(0..=1)]
fn uuid7_blob(args: &[Value]) -> Value {
    let timestamp = if args.is_empty() {
        uuid::Timestamp::now(uuid::ContextV7::new())
    } else if args[0].value_type() == limbo_extension::ValueType::Integer {
        let Some(int) = args[0].to_integer() else {
            return Value::null();
        };
        // Reject negative seconds: `int as u64` would wrap them into huge
        // bogus timestamps.
        if int < 0 {
            return Value::null();
        }
        uuid::Timestamp::from_unix(uuid::ContextV7::new(), int as u64, 0)
    } else {
        return Value::null();
    };
    Value::from_blob(uuid::Uuid::new_v7(timestamp).as_bytes().to_vec())
}
/// `uuid7_timestamp_ms(uuid)` — extracts the millisecond unix timestamp
/// embedded in a version 7 UUID given as a 16-byte blob or canonical text.
/// Returns NULL for other argument types or malformed input.
#[export_scalar]
#[args(1)]
fn exec_ts_from_uuid7(args: &[Value]) -> Value {
    match args[0].value_type() {
        ValueType::Blob => {
            let Some(blob) = args[0].to_blob() else {
                return Value::null();
            };
            // Was `.unwrap()`: any blob whose length is not exactly 16
            // bytes made `from_slice` fail and panicked on user input.
            // Return NULL instead, matching the Text arm below.
            let Ok(uuid) = uuid::Uuid::from_slice(blob.as_slice()) else {
                return Value::null();
            };
            Value::from_integer(uuid_to_unix(uuid.as_bytes()) as i64)
        }
        ValueType::Text => {
            let Some(text) = args[0].to_text() else {
                return Value::null();
            };
            let Ok(uuid) = uuid::Uuid::parse_str(&text) else {
                return Value::null();
            };
            Value::from_integer(uuid_to_unix(uuid.as_bytes()) as i64)
        }
        _ => Value::null(),
    }
}
/// `uuid_str(blob)` — renders a 16-byte UUID blob in canonical text form.
/// Returns NULL when the argument is not a blob or not exactly 16 bytes.
#[export_scalar]
#[args(1)]
fn uuid_str(args: &[Value]) -> Value {
    let Some(blob) = args[0].to_blob() else {
        return Value::null();
    };
    match uuid::Uuid::from_slice(blob.as_slice()) {
        Ok(uuid) => Value::from_text(uuid.to_string()),
        Err(_) => Value::null(),
    }
}
/// `uuid_blob(text)` — parses a UUID from its text form and returns it as a
/// 16-byte blob. Returns NULL when the argument is not text or not a valid
/// UUID.
#[export_scalar]
#[args(1)]
fn uuid_blob(args: &[Value]) -> Value {
    let Some(text) = args[0].to_text() else {
        return Value::null();
    };
    let Ok(uuid) = uuid::Uuid::parse_str(&text) else {
        return Value::null();
    };
    Value::from_blob(uuid.as_bytes().to_vec())
}
/// Reassembles the 48-bit big-endian millisecond timestamp stored in the
/// first six bytes of a version 7 UUID.
#[inline(always)]
fn uuid_to_unix(uuid: &[u8; 16]) -> u64 {
    uuid[..6]
        .iter()
        .fold(0u64, |millis, &byte| (millis << 8) | u64::from(byte))
}

View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,176 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

View File

@@ -20,4 +20,4 @@ SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,11 @@
[package]
name = "limbo_extension"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
[dependencies]
log = "0.4.20"
limbo_macros = { path = "../macros" }

266
limbo_extension/src/lib.rs Normal file
View File

@@ -0,0 +1,266 @@
use std::os::raw::{c_char, c_void};
/// FFI-safe status code returned by extension API calls.
pub type ResultCode = i32;
pub use limbo_macros::export_scalar;
/// The call succeeded.
pub const RESULT_OK: ResultCode = 0;
/// The call failed; no finer-grained detail is currently encoded.
pub const RESULT_ERROR: ResultCode = 1;
// TODO: more error types
/// Signature of the `register_extension` entry point a dynamically loaded
/// extension must export (generated by the `register_extension!` macro).
pub type ExtensionEntryPoint = extern "C" fn(api: *const ExtensionApi) -> ResultCode;
/// Signature of a scalar SQL function: receives `argc` arguments starting at
/// the given pointer and returns a single owned `Value`.
pub type ScalarFunction = extern "C" fn(argc: i32, *const Value) -> Value;
/// Table of callbacks the host database hands to an extension at load time.
#[repr(C)]
pub struct ExtensionApi {
    /// Opaque host-side context; must be passed back to every callback.
    pub ctx: *mut c_void,
    /// Registers a scalar function under `name` (NUL-terminated C string).
    pub register_scalar_function:
        extern "C" fn(ctx: *mut c_void, name: *const c_char, func: ScalarFunction) -> ResultCode,
}
/// Declares the extension's `register_extension` entry point.
///
/// Expands to a `#[no_mangle] pub unsafe extern "C"` function that the host
/// resolves after loading the shared library; it null-checks the API table and
/// registers every listed scalar function. Aggregate and virtual-table
/// registration are placeholders (see the commented-out arms).
#[macro_export]
macro_rules! register_extension {
    (
        scalars: { $( $scalar_name:expr => $scalar_func:ident ),* $(,)? },
        //aggregates: { $( $agg_name:expr => ($step_func:ident, $finalize_func:ident) ),* $(,)? },
        //virtual_tables: { $( $vt_name:expr => $vt_impl:expr ),* $(,)? }
    ) => {
        #[no_mangle]
        pub unsafe extern "C" fn register_extension(api: *const $crate::ExtensionApi) -> $crate::ResultCode {
            // A null API table means there is no host to register with.
            if api.is_null() {
                return $crate::RESULT_ERROR;
            }
            register_scalar_functions! { api, $( $scalar_name => $scalar_func ),* }
            // TODO:
            //register_aggregate_functions! { $( $agg_name => ($step_func, $finalize_func) ),* }
            //register_virtual_tables! { $( $vt_name => $vt_impl ),* }
            $crate::RESULT_OK
        }
    }
}
/// Registers each `name => fn` pair with the host through the `ExtensionApi`.
///
/// NOTE(review): the `ResultCode` returned by `register_scalar_function` is
/// ignored, so a failed registration is silent; also `CString::new` panics if
/// a function name contains an interior NUL byte — consider surfacing both.
#[macro_export]
macro_rules! register_scalar_functions {
    ( $api:expr, $( $fname:expr => $fptr:ident ),* ) => {
        unsafe {
            $(
                let cname = std::ffi::CString::new($fname).unwrap();
                ((*$api).register_scalar_function)((*$api).ctx, cname.as_ptr(), $fptr);
            )*
        }
    }
}
/// Discriminant tag describing which payload a `Value` carries.
#[repr(C)]
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum ValueType {
    Null,
    Integer,
    Float,
    Text,
    Blob,
}

/// FFI-safe tagged value. `value` points at a payload whose concrete type is
/// selected by `value_type`: a boxed `i64`, `f64`, `TextValue`, or `Blob`;
/// null for `Null` (see the `from_*` constructors below).
#[repr(C)]
pub struct Value {
    value_type: ValueType,
    value: *mut c_void,
}

impl std::fmt::Debug for Value {
    /// Renders as `Value { <Tag>: <payload> }`, dereferencing the raw payload
    /// pointer according to the tag.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.value_type {
            ValueType::Null => write!(f, "Value {{ Null }}"),
            ValueType::Integer => {
                let n = unsafe { *(self.value as *const i64) };
                write!(f, "Value {{ Integer: {} }}", n)
            }
            ValueType::Float => {
                let x = unsafe { *(self.value as *const f64) };
                write!(f, "Value {{ Float: {} }}", x)
            }
            ValueType::Text => {
                let t = unsafe { &*(self.value as *const TextValue) };
                write!(f, "Value {{ Text: {:?} }}", t)
            }
            ValueType::Blob => {
                let b = unsafe { &*(self.value as *const Blob) };
                write!(f, "Value {{ Blob: {:?} }}", b)
            }
        }
    }
}
/// FFI-safe string handle: a raw byte pointer plus length (not NUL-terminated).
#[repr(C)]
pub struct TextValue {
    text: *const u8,
    len: u32,
}

impl std::fmt::Debug for TextValue {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Produces `TextValue { text: <ptr>, len: <n> }`.
        f.debug_struct("TextValue")
            .field("text", &self.text)
            .field("len", &self.len)
            .finish()
    }
}

impl Default for TextValue {
    /// The empty string: null pointer, zero length.
    fn default() -> Self {
        TextValue::new(std::ptr::null(), 0)
    }
}

impl TextValue {
    /// Wraps a raw (ptr, len) pair. `len` is truncated to `u32`
    /// (assumes strings stay well below 4 GiB — TODO confirm).
    pub(crate) fn new(text: *const u8, len: usize) -> Self {
        Self {
            text,
            len: len as u32,
        }
    }

    /// Borrows the bytes as `&str`; a null pointer yields `""`.
    fn as_str(&self) -> &str {
        if self.text.is_null() {
            return "";
        }
        // SAFETY: non-null TextValues are built from valid UTF-8 `String`s
        // (see `Value::from_text`), so skipping UTF-8 validation is sound.
        unsafe {
            let bytes = std::slice::from_raw_parts(self.text, self.len as usize);
            std::str::from_utf8_unchecked(bytes)
        }
    }
}
/// FFI-safe byte-buffer handle: raw pointer plus size in bytes.
#[repr(C)]
pub struct Blob {
    data: *const u8,
    size: u64,
}

impl std::fmt::Debug for Blob {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Produces `Blob { data: <ptr>, size: <n> }`.
        f.debug_struct("Blob")
            .field("data", &self.data)
            .field("size", &self.size)
            .finish()
    }
}

impl Blob {
    /// Wraps a raw (ptr, size) pair; ownership semantics are the caller's.
    pub fn new(data: *const u8, size: u64) -> Self {
        Self { data, size }
    }
}
impl Value {
pub fn null() -> Self {
Self {
value_type: ValueType::Null,
value: std::ptr::null_mut(),
}
}
pub fn value_type(&self) -> ValueType {
self.value_type
}
pub fn to_float(&self) -> Option<f64> {
if self.value_type != ValueType::Float {
return None;
}
if self.value.is_null() {
return None;
}
Some(unsafe { *(self.value as *const f64) })
}
pub fn to_text(&self) -> Option<String> {
if self.value_type != ValueType::Text {
return None;
}
if self.value.is_null() {
return None;
}
let txt = unsafe { &*(self.value as *const TextValue) };
Some(String::from(txt.as_str()))
}
pub fn to_blob(&self) -> Option<Vec<u8>> {
if self.value_type != ValueType::Blob {
return None;
}
if self.value.is_null() {
return None;
}
let blob = unsafe { &*(self.value as *const Blob) };
let slice = unsafe { std::slice::from_raw_parts(blob.data, blob.size as usize) };
Some(slice.to_vec())
}
pub fn to_integer(&self) -> Option<i64> {
if self.value_type != ValueType::Integer {
return None;
}
if self.value.is_null() {
return None;
}
Some(unsafe { *(self.value as *const i64) })
}
pub fn from_integer(value: i64) -> Self {
let boxed = Box::new(value);
Self {
value_type: ValueType::Integer,
value: Box::into_raw(boxed) as *mut c_void,
}
}
pub fn from_float(value: f64) -> Self {
let boxed = Box::new(value);
Self {
value_type: ValueType::Float,
value: Box::into_raw(boxed) as *mut c_void,
}
}
pub fn from_text(s: String) -> Self {
let buffer = s.into_boxed_str();
let ptr = buffer.as_ptr();
let len = buffer.len();
std::mem::forget(buffer);
let text_value = TextValue::new(ptr, len);
let text_box = Box::new(text_value);
Self {
value_type: ValueType::Text,
value: Box::into_raw(text_box) as *mut c_void,
}
}
pub fn from_blob(value: Vec<u8>) -> Self {
let boxed = Box::new(Blob::new(value.as_ptr(), value.len() as u64));
std::mem::forget(value);
Self {
value_type: ValueType::Blob,
value: Box::into_raw(boxed) as *mut c_void,
}
}
/// # Safety
/// consumes the value while freeing the underlying memory with null check.
/// however this does assume that the type was properly constructed with
/// the appropriate value_type and value.
pub unsafe fn free(self) {
if self.value.is_null() {
return;
}
match self.value_type {
ValueType::Integer => {
let _ = Box::from_raw(self.value as *mut i64);
}
ValueType::Float => {
let _ = Box::from_raw(self.value as *mut f64);
}
ValueType::Text => {
let _ = Box::from_raw(self.value as *mut TextValue);
}
ValueType::Blob => {
let _ = Box::from_raw(self.value as *mut Blob);
}
ValueType::Null => {}
}
}
}

View File

@@ -11,3 +11,8 @@ description = "The Limbo database library"
[lib]
proc-macro = true
[dependencies]
quote = "1.0.38"
proc-macro2 = "1.0.38"
syn = { version = "2.0.96", features = ["full"]}

63
macros/src/args.rs Normal file
View File

@@ -0,0 +1,63 @@
use syn::parse::{Parse, ParseStream, Result as ParseResult};
use syn::{LitInt, Token};
/// Arity specification parsed from an `#[args(...)]` attribute.
#[derive(Debug)]
pub enum ArgsSpec {
    /// Exactly this many arguments, e.g. `#[args(2)]`.
    Exact(i32),
    /// A range of argument counts, e.g. `#[args(0..2)]` or `#[args(0..=2)]`.
    Range {
        lower: i32,
        upper: i32,
        // True for `..=` (upper bound included), false for `..`.
        inclusive: bool,
    },
}
/// Wrapper holding the parsed spec; the `Parse` impl below does the work.
pub struct ArgsAttr {
    pub spec: ArgsSpec,
}
impl Parse for ArgsAttr {
fn parse(input: ParseStream) -> ParseResult<Self> {
if input.peek(LitInt) {
let start_lit = input.parse::<LitInt>()?;
let start_val = start_lit.base10_parse::<i32>()?;
if input.is_empty() {
return Ok(ArgsAttr {
spec: ArgsSpec::Exact(start_val),
});
}
if input.peek(Token![..=]) {
let _dots = input.parse::<Token![..=]>()?;
let end_lit = input.parse::<LitInt>()?;
let end_val = end_lit.base10_parse::<i32>()?;
Ok(ArgsAttr {
spec: ArgsSpec::Range {
lower: start_val,
upper: end_val,
inclusive: true,
},
})
} else if input.peek(Token![..]) {
let _dots = input.parse::<Token![..]>()?;
let end_lit = input.parse::<LitInt>()?;
let end_val = end_lit.base10_parse::<i32>()?;
Ok(ArgsAttr {
spec: ArgsSpec::Range {
lower: start_val,
upper: end_val,
inclusive: false,
},
})
} else {
Err(syn::Error::new_spanned(
start_lit,
"Expected '..' or '..=' for a range, or nothing for a single integer.",
))
}
} else {
Err(syn::Error::new(
input.span(),
"Expected an integer or a range expression, like `0`, `0..2`, or `0..=2`.",
))
}
}
}

View File

@@ -1,3 +1,5 @@
mod args;
use args::{ArgsAttr, ArgsSpec};
extern crate proc_macro;
use proc_macro::{token_stream::IntoIter, Group, TokenStream, TokenTree};
use std::collections::HashMap;
@@ -133,3 +135,123 @@ fn generate_get_description(
);
enum_impl.parse().unwrap()
}
use quote::quote;
use syn::{parse_macro_input, Attribute, Block, ItemFn};
/// Attribute macro that transforms the preferred API for scalar extension
/// functions into an FFI-compatible `extern "C" fn(argc, argv) -> Value`
/// shim, validating `argc` against the mandatory `#[args(...)]` attribute
/// before the user's body runs. Inside the body the arguments are visible
/// as a safe slice named `args`.
#[proc_macro_attribute]
pub fn export_scalar(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let mut input_fn = parse_macro_input!(item as ItemFn);
    let fn_name = &input_fn.sig.ident;
    let fn_body: &Block = &input_fn.block;
    // The parsed `#[args(...)]` spec, once found among the attributes.
    let mut extracted_spec: Option<ArgsSpec> = None;
    // A deferred parse error, emitted as a compile error below.
    let mut arg_err = None;
    // Strip our own `#[args(...)]` attribute, keeping all others.
    // NOTE(review): `kept_attrs` is stored back on `input_fn`, but the
    // generated function below does not re-emit them — confirm whether the
    // remaining attributes should be forwarded to the shim.
    let kept_attrs: Vec<Attribute> = input_fn
        .attrs
        .into_iter()
        .filter_map(|attr| {
            if attr.path().is_ident("args") {
                let parsed_attr = match attr.parse_args::<ArgsAttr>() {
                    Ok(p) => p,
                    Err(err) => {
                        arg_err = Some(err.to_compile_error());
                        return None;
                    }
                };
                extracted_spec = Some(parsed_attr.spec);
                None
            } else {
                Some(attr)
            }
        })
        .collect();
    input_fn.attrs = kept_attrs;
    if let Some(arg_err) = arg_err {
        return arg_err.into();
    }
    // `#[args(...)]` is mandatory: without it argc cannot be validated.
    let spec = match extracted_spec {
        Some(s) => s,
        None => {
            return syn::Error::new_spanned(
                fn_name,
                "Expected an attribute with integer or range: #[args(1)] #[args(0..2)], etc.",
            )
            .to_compile_error()
            .into()
        }
    };
    // Build the argc guard matching the requested arity. On mismatch the shim
    // logs an error and returns SQL NULL instead of running the body.
    let arg_check = match spec {
        ArgsSpec::Exact(exact_count) => {
            quote! {
                if argc != #exact_count {
                    log::error!(
                        "{} was called with {} arguments, expected exactly {}",
                        stringify!(#fn_name),
                        argc,
                        #exact_count
                    );
                    return ::limbo_extension::Value::null();
                }
            }
        }
        ArgsSpec::Range {
            lower,
            upper,
            inclusive: true,
        } => {
            quote! {
                if !(#lower..=#upper).contains(&argc) {
                    log::error!(
                        "{} was called with {} arguments, expected {}..={} range",
                        stringify!(#fn_name),
                        argc,
                        #lower,
                        #upper
                    );
                    return ::limbo_extension::Value::null();
                }
            }
        }
        ArgsSpec::Range {
            lower,
            upper,
            inclusive: false,
        } => {
            quote! {
                if !(#lower..#upper).contains(&argc) {
                    log::error!(
                        "{} was called with {} arguments, expected {}..{} (exclusive)",
                        stringify!(#fn_name),
                        argc,
                        #lower,
                        #upper
                    );
                    return ::limbo_extension::Value::null();
                }
            }
        }
    };
    // The shim keeps the original name (via export_name) so the host can
    // resolve it, and exposes the raw argv as a safe `args` slice.
    let expanded = quote! {
        #[export_name = stringify!(#fn_name)]
        extern "C" fn #fn_name(argc: i32, argv: *const ::limbo_extension::Value) -> ::limbo_extension::Value {
            #arg_check
            // from_raw_parts doesn't currently accept null ptr
            if argc == 0 || argv.is_null() {
                log::debug!("{} was called with no arguments", stringify!(#fn_name));
                let args: &[::limbo_extension::Value] = &[];
                #fn_body
            } else {
                let ptr_slice = unsafe {
                    std::slice::from_raw_parts(argv, argc as usize)
                };
                let args: &[::limbo_extension::Value] = ptr_slice;
                #fn_body
            }
        }
    };
    TokenStream::from(expanded)
}

7
scripts/run-sim Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
# Run the simulator in a loop until a run fails.
# With `set -e`, the first non-zero exit from `cargo run` inside the loop
# body aborts the script, so this effectively loops until the simulator
# crashes or reports an error.
set -e
while true; do
    cargo run -p limbo_sim
done

View File

@@ -4,24 +4,41 @@ use anarchist_readable_name_generator_lib::readable_name_custom;
use rand::{distributions::uniform::SampleUniform, Rng};
pub mod plan;
pub mod property;
pub mod query;
pub mod table;
/// Arbitrary trait for generating random values
/// An implementation of arbitrary is assumed to be a uniform sampling of
/// the possible values of the type, with a bias towards smaller values for
/// practicality.
pub trait Arbitrary {
fn arbitrary<R: Rng>(rng: &mut R) -> Self;
}
/// ArbitraryFrom trait for generating random values from a given value
/// ArbitraryFrom allows for constructing relations, where the generated
/// value is dependent on the given value. These relations could be constraints
/// such as generating an integer within an interval, or a value that fits in a table,
/// or a predicate satisfying a given table row.
pub trait ArbitraryFrom<T> {
fn arbitrary_from<R: Rng>(rng: &mut R, t: &T) -> Self;
fn arbitrary_from<R: Rng>(rng: &mut R, t: T) -> Self;
}
/// Frequency is a helper function for composing different generators with different frequency
/// of occurences.
/// The type signature for the `N` parameter is a bit complex, but it
/// roughly corresponds to a type that can be summed, compared, subtracted and sampled, which are
/// the operations we require for the implementation.
// todo: switch to a simpler type signature that can accomodate all integer and float types, which
// should be enough for our purposes.
pub(crate) fn frequency<
'a,
T,
R: rand::Rng,
N: Sum + PartialOrd + Copy + Default + SampleUniform + SubAssign,
>(
choices: Vec<(N, Box<dyn FnOnce(&mut R) -> T + 'a>)>,
choices: Vec<(N, Box<dyn Fn(&mut R) -> T + 'a>)>,
rng: &mut R,
) -> T {
let total = choices.iter().map(|(weight, _)| *weight).sum::<N>();
@@ -37,6 +54,7 @@ pub(crate) fn frequency<
unreachable!()
}
/// one_of is a helper function for composing different generators with equal probability of occurence.
pub(crate) fn one_of<'a, T, R: rand::Rng>(
choices: Vec<Box<dyn Fn(&mut R) -> T + 'a>>,
rng: &mut R,
@@ -45,15 +63,20 @@ pub(crate) fn one_of<'a, T, R: rand::Rng>(
choices[index](rng)
}
/// pick is a helper function for uniformly picking a random element from a slice
pub(crate) fn pick<'a, T, R: rand::Rng>(choices: &'a [T], rng: &mut R) -> &'a T {
let index = rng.gen_range(0..choices.len());
&choices[index]
}
/// pick_index is typically used for picking an index from a slice to later refer to the element
/// at that index.
pub(crate) fn pick_index<R: rand::Rng>(choices: usize, rng: &mut R) -> usize {
rng.gen_range(0..choices)
}
/// gen_random_text uses `anarchist_readable_name_generator_lib` to generate random
/// readable names for tables, columns, text values etc.
fn gen_random_text<T: Rng>(rng: &mut T) -> String {
let big_text = rng.gen_ratio(1, 1000);
if big_text {

View File

@@ -1,12 +1,10 @@
use std::{fmt::Display, rc::Rc};
use std::{fmt::Display, rc::Rc, vec};
use limbo_core::{Connection, Result, StepResult};
use rand::SeedableRng;
use rand_chacha::ChaCha8Rng;
use crate::{
model::{
query::{Create, Insert, Predicate, Query, Select},
query::{Create, Insert, Query, Select},
table::Value,
},
SimConnection, SimulatorEnv,
@@ -14,25 +12,118 @@ use crate::{
use crate::generation::{frequency, Arbitrary, ArbitraryFrom};
use super::{pick, pick_index};
use super::{
pick,
property::{remaining, Property},
};
pub(crate) type ResultSet = Result<Vec<Vec<Value>>>;
#[derive(Clone)]
pub(crate) struct InteractionPlan {
pub(crate) plan: Vec<Interaction>,
pub(crate) plan: Vec<Interactions>,
}
pub(crate) struct InteractionPlanState {
pub(crate) stack: Vec<ResultSet>,
pub(crate) interaction_pointer: usize,
pub(crate) secondary_pointer: usize,
}
#[derive(Clone)]
pub(crate) enum Interactions {
Property(Property),
Query(Query),
Fault(Fault),
}
impl Interactions {
pub(crate) fn name(&self) -> Option<String> {
match self {
Interactions::Property(property) => Some(property.name()),
Interactions::Query(_) => None,
Interactions::Fault(_) => None,
}
}
pub(crate) fn interactions(&self) -> Vec<Interaction> {
match self {
Interactions::Property(property) => property.interactions(),
Interactions::Query(query) => vec![Interaction::Query(query.clone())],
Interactions::Fault(fault) => vec![Interaction::Fault(fault.clone())],
}
}
}
impl Interactions {
pub(crate) fn dependencies(&self) -> Vec<String> {
match self {
Interactions::Property(property) => {
property
.interactions()
.iter()
.fold(vec![], |mut acc, i| match i {
Interaction::Query(q) => {
acc.extend(q.dependencies());
acc
}
_ => acc,
})
}
Interactions::Query(query) => query.dependencies(),
Interactions::Fault(_) => vec![],
}
}
pub(crate) fn uses(&self) -> Vec<String> {
match self {
Interactions::Property(property) => {
property
.interactions()
.iter()
.fold(vec![], |mut acc, i| match i {
Interaction::Query(q) => {
acc.extend(q.uses());
acc
}
_ => acc,
})
}
Interactions::Query(query) => query.uses(),
Interactions::Fault(_) => vec![],
}
}
}
impl Display for InteractionPlan {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for interaction in &self.plan {
match interaction {
Interaction::Query(query) => writeln!(f, "{};", query)?,
Interaction::Assertion(assertion) => {
writeln!(f, "-- ASSERT: {};", assertion.message)?
for interactions in &self.plan {
match interactions {
Interactions::Property(property) => {
let name = property.name();
writeln!(f, "-- begin testing '{}'", name)?;
for interaction in property.interactions() {
write!(f, "\t")?;
match interaction {
Interaction::Query(query) => writeln!(f, "{};", query)?,
Interaction::Assumption(assumption) => {
writeln!(f, "-- ASSUME: {};", assumption.message)?
}
Interaction::Assertion(assertion) => {
writeln!(f, "-- ASSERT: {};", assertion.message)?
}
Interaction::Fault(fault) => writeln!(f, "-- FAULT: {};", fault)?,
}
}
writeln!(f, "-- end testing '{}'", name)?;
}
Interactions::Fault(fault) => {
writeln!(f, "-- FAULT '{}'", fault)?;
}
Interactions::Query(query) => {
writeln!(f, "{};", query)?;
}
Interaction::Fault(fault) => writeln!(f, "-- FAULT: {};", fault)?,
}
}
@@ -40,7 +131,7 @@ impl Display for InteractionPlan {
}
}
#[derive(Debug)]
#[derive(Debug, Clone, Copy)]
pub(crate) struct InteractionStats {
pub(crate) read_count: usize,
pub(crate) write_count: usize,
@@ -60,6 +151,7 @@ impl Display for InteractionStats {
pub(crate) enum Interaction {
Query(Query),
Assumption(Assertion),
Assertion(Assertion),
Fault(Fault),
}
@@ -68,19 +160,25 @@ impl Display for Interaction {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Query(query) => write!(f, "{}", query),
Self::Assumption(assumption) => write!(f, "ASSUME: {}", assumption.message),
Self::Assertion(assertion) => write!(f, "ASSERT: {}", assertion.message),
Self::Fault(fault) => write!(f, "FAULT: {}", fault),
}
}
}
type AssertionFunc = dyn Fn(&Vec<ResultSet>) -> bool;
type AssertionFunc = dyn Fn(&Vec<ResultSet>, &SimulatorEnv) -> Result<bool>;
enum AssertionAST {
Pick(),
}
pub(crate) struct Assertion {
pub(crate) func: Box<AssertionFunc>,
pub(crate) message: String,
}
#[derive(Debug, Clone)]
pub(crate) enum Fault {
Disconnect,
}
@@ -93,47 +191,60 @@ impl Display for Fault {
}
}
pub(crate) struct Interactions(Vec<Interaction>);
impl Interactions {
pub(crate) fn shadow(&self, env: &mut SimulatorEnv) {
for interaction in &self.0 {
match interaction {
Interaction::Query(query) => match query {
Query::Create(create) => {
if !env.tables.iter().any(|t| t.name == create.table.name) {
env.tables.push(create.table.clone());
}
match self {
Interactions::Property(property) => {
for interaction in property.interactions() {
match interaction {
Interaction::Query(query) => match query {
Query::Create(create) => {
if !env.tables.iter().any(|t| t.name == create.table.name) {
env.tables.push(create.table.clone());
}
}
Query::Insert(insert) => {
let table = env
.tables
.iter_mut()
.find(|t| t.name == insert.table)
.unwrap();
table.rows.extend(insert.values.clone());
}
Query::Delete(_) => todo!(),
Query::Select(_) => {}
},
Interaction::Assertion(_) => {}
Interaction::Assumption(_) => {}
Interaction::Fault(_) => {}
}
Query::Insert(insert) => {
let table = env
.tables
.iter_mut()
.find(|t| t.name == insert.table)
.unwrap();
table.rows.extend(insert.values.clone());
}
Query::Delete(_) => todo!(),
Query::Select(_) => {}
},
Interaction::Assertion(_) => {}
Interaction::Fault(_) => {}
}
}
Interactions::Query(query) => match query {
Query::Create(create) => {
if !env.tables.iter().any(|t| t.name == create.table.name) {
env.tables.push(create.table.clone());
}
}
Query::Insert(insert) => {
let table = env
.tables
.iter_mut()
.find(|t| t.name == insert.table)
.unwrap();
table.rows.extend(insert.values.clone());
}
Query::Delete(_) => todo!(),
Query::Select(_) => {}
},
Interactions::Fault(_) => {}
}
}
}
impl InteractionPlan {
pub(crate) fn new() -> Self {
Self {
plan: Vec::new(),
stack: Vec::new(),
interaction_pointer: 0,
}
}
pub(crate) fn push(&mut self, interaction: Interaction) {
self.plan.push(interaction);
Self { plan: Vec::new() }
}
pub(crate) fn stats(&self) -> InteractionStats {
@@ -142,16 +253,27 @@ impl InteractionPlan {
let mut delete = 0;
let mut create = 0;
for interaction in &self.plan {
match interaction {
Interaction::Query(query) => match query {
for interactions in &self.plan {
match interactions {
Interactions::Property(property) => {
for interaction in &property.interactions() {
if let Interaction::Query(query) = interaction {
match query {
Query::Select(_) => read += 1,
Query::Insert(_) => write += 1,
Query::Delete(_) => delete += 1,
Query::Create(_) => create += 1,
}
}
}
}
Interactions::Query(query) => match query {
Query::Select(_) => read += 1,
Query::Insert(_) => write += 1,
Query::Delete(_) => delete += 1,
Query::Create(_) => create += 1,
},
Interaction::Assertion(_) => {}
Interaction::Fault(_) => {}
Interactions::Fault(_) => {}
}
}
@@ -164,25 +286,18 @@ impl InteractionPlan {
}
}
impl ArbitraryFrom<SimulatorEnv> for InteractionPlan {
fn arbitrary_from<R: rand::Rng>(rng: &mut R, env: &SimulatorEnv) -> Self {
impl ArbitraryFrom<&mut SimulatorEnv> for InteractionPlan {
fn arbitrary_from<R: rand::Rng>(rng: &mut R, env: &mut SimulatorEnv) -> Self {
let mut plan = InteractionPlan::new();
let mut env = SimulatorEnv {
opts: env.opts.clone(),
tables: vec![],
connections: vec![],
io: env.io.clone(),
db: env.db.clone(),
rng: ChaCha8Rng::seed_from_u64(rng.next_u64()),
};
let num_interactions = env.opts.max_interactions;
// First create at least one table
let create_query = Create::arbitrary(rng);
env.tables.push(create_query.table.clone());
plan.push(Interaction::Query(Query::Create(create_query)));
plan.plan
.push(Interactions::Query(Query::Create(create_query)));
while plan.plan.len() < num_interactions {
log::debug!(
@@ -190,10 +305,10 @@ impl ArbitraryFrom<SimulatorEnv> for InteractionPlan {
plan.plan.len(),
num_interactions
);
let interactions = Interactions::arbitrary_from(rng, &(&env, plan.stats()));
interactions.shadow(&mut env);
let interactions = Interactions::arbitrary_from(rng, (env, plan.stats()));
interactions.shadow(env);
plan.plan.extend(interactions.0.into_iter());
plan.plan.push(interactions);
}
log::info!("Generated plan with {} interactions", plan.plan.len());
@@ -203,72 +318,77 @@ impl ArbitraryFrom<SimulatorEnv> for InteractionPlan {
impl Interaction {
pub(crate) fn execute_query(&self, conn: &mut Rc<Connection>) -> ResultSet {
match self {
Self::Query(query) => {
let query_str = query.to_string();
let rows = conn.query(&query_str);
if rows.is_err() {
let err = rows.err();
log::error!(
"Error running query '{}': {:?}",
&query_str[0..query_str.len().min(4096)],
err
);
return Err(err.unwrap());
}
let rows = rows.unwrap();
assert!(rows.is_some());
let mut rows = rows.unwrap();
let mut out = Vec::new();
while let Ok(row) = rows.next_row() {
match row {
StepResult::Row(row) => {
let mut r = Vec::new();
for el in &row.values {
let v = match el {
limbo_core::Value::Null => Value::Null,
limbo_core::Value::Integer(i) => Value::Integer(*i),
limbo_core::Value::Float(f) => Value::Float(*f),
limbo_core::Value::Text(t) => Value::Text(t.to_string()),
limbo_core::Value::Blob(b) => Value::Blob(b.to_vec()),
};
r.push(v);
}
out.push(r);
if let Self::Query(query) = self {
let query_str = query.to_string();
let rows = conn.query(&query_str);
if rows.is_err() {
let err = rows.err();
log::debug!(
"Error running query '{}': {:?}",
&query_str[0..query_str.len().min(4096)],
err
);
return Err(err.unwrap());
}
let rows = rows.unwrap();
assert!(rows.is_some());
let mut rows = rows.unwrap();
let mut out = Vec::new();
while let Ok(row) = rows.next_row() {
match row {
StepResult::Row(row) => {
let mut r = Vec::new();
for el in &row.values {
let v = match el {
limbo_core::Value::Null => Value::Null,
limbo_core::Value::Integer(i) => Value::Integer(*i),
limbo_core::Value::Float(f) => Value::Float(*f),
limbo_core::Value::Text(t) => Value::Text(t.to_string()),
limbo_core::Value::Blob(b) => Value::Blob(b.to_vec()),
};
r.push(v);
}
StepResult::IO => {}
StepResult::Interrupt => {}
StepResult::Done => {
break;
}
StepResult::Busy => {}
out.push(r);
}
StepResult::IO => {}
StepResult::Interrupt => {}
StepResult::Done => {
break;
}
StepResult::Busy => {}
}
}
Ok(out)
}
Self::Assertion(_) => {
unreachable!("unexpected: this function should only be called on queries")
}
Interaction::Fault(_) => {
unreachable!("unexpected: this function should only be called on queries")
}
Ok(out)
} else {
unreachable!("unexpected: this function should only be called on queries")
}
}
pub(crate) fn execute_assertion(&self, stack: &Vec<ResultSet>) -> Result<()> {
pub(crate) fn execute_assertion(
&self,
stack: &Vec<ResultSet>,
env: &SimulatorEnv,
) -> Result<()> {
match self {
Self::Query(_) => {
unreachable!("unexpected: this function should only be called on assertions")
}
Self::Assertion(assertion) => {
if !assertion.func.as_ref()(stack) {
return Err(limbo_core::LimboError::InternalError(
let result = assertion.func.as_ref()(stack, env);
match result {
Ok(true) => Ok(()),
Ok(false) => Err(limbo_core::LimboError::InternalError(
assertion.message.clone(),
));
)),
Err(err) => Err(limbo_core::LimboError::InternalError(format!(
"{}. Inner error: {}",
assertion.message, err
))),
}
Ok(())
}
Self::Assumption(_) => {
unreachable!("unexpected: this function should only be called on assertions")
}
Self::Fault(_) => {
unreachable!("unexpected: this function should only be called on assertions")
@@ -276,6 +396,37 @@ impl Interaction {
}
}
pub(crate) fn execute_assumption(
&self,
stack: &Vec<ResultSet>,
env: &SimulatorEnv,
) -> Result<()> {
match self {
Self::Query(_) => {
unreachable!("unexpected: this function should only be called on assumptions")
}
Self::Assertion(_) => {
unreachable!("unexpected: this function should only be called on assumptions")
}
Self::Assumption(assumption) => {
let result = assumption.func.as_ref()(stack, env);
match result {
Ok(true) => Ok(()),
Ok(false) => Err(limbo_core::LimboError::InternalError(
assumption.message.clone(),
)),
Err(err) => Err(limbo_core::LimboError::InternalError(format!(
"{}. Inner error: {}",
assumption.message, err
))),
}
}
Self::Fault(_) => {
unreachable!("unexpected: this function should only be called on assumptions")
}
}
}
pub(crate) fn execute_fault(&self, env: &mut SimulatorEnv, conn_index: usize) -> Result<()> {
match self {
Self::Query(_) => {
@@ -284,6 +435,9 @@ impl Interaction {
Self::Assertion(_) => {
unreachable!("unexpected: this function should only be called on faults")
}
Self::Assumption(_) => {
unreachable!("unexpected: this function should only be called on faults")
}
Self::Fault(fault) => {
match fault {
Fault::Disconnect => {
@@ -306,140 +460,57 @@ impl Interaction {
}
}
fn property_insert_select<R: rand::Rng>(rng: &mut R, env: &SimulatorEnv) -> Interactions {
// Get a random table
let table = pick(&env.tables, rng);
// Pick a random column
let column_index = pick_index(table.columns.len(), rng);
let column = &table.columns[column_index].clone();
// Generate a random value of the column type
let value = Value::arbitrary_from(rng, &column.column_type);
// Create a whole new row
let mut row = Vec::new();
for (i, column) in table.columns.iter().enumerate() {
if i == column_index {
row.push(value.clone());
} else {
let value = Value::arbitrary_from(rng, &column.column_type);
row.push(value);
}
}
// Insert the row
let insert_query = Interaction::Query(Query::Insert(Insert {
table: table.name.clone(),
values: vec![row.clone()],
}));
// Select the row
let select_query = Interaction::Query(Query::Select(Select {
table: table.name.clone(),
predicate: Predicate::Eq(column.name.clone(), value.clone()),
}));
// Check that the row is there
let assertion = Interaction::Assertion(Assertion {
message: format!(
"row [{:?}] not found in table {} after inserting ({} = {})",
row.iter().map(|v| v.to_string()).collect::<Vec<String>>(),
table.name,
column.name,
value,
),
func: Box::new(move |stack: &Vec<ResultSet>| {
let rows = stack.last().unwrap();
match rows {
Ok(rows) => rows.iter().any(|r| r == &row),
Err(_) => false,
}
}),
});
Interactions(vec![insert_query, select_query, assertion])
}
fn property_double_create_failure<R: rand::Rng>(rng: &mut R, _env: &SimulatorEnv) -> Interactions {
let create_query = Create::arbitrary(rng);
let table_name = create_query.table.name.clone();
let cq1 = Interaction::Query(Query::Create(create_query.clone()));
let cq2 = Interaction::Query(Query::Create(create_query.clone()));
let assertion = Interaction::Assertion(Assertion {
message:
"creating two tables with the name should result in a failure for the second query"
.to_string(),
func: Box::new(move |stack: &Vec<ResultSet>| {
let last = stack.last().unwrap();
match last {
Ok(_) => false,
Err(e) => e
.to_string()
.contains(&format!("Table {table_name} already exists")),
}
}),
});
Interactions(vec![cq1, cq2, assertion])
}
fn create_table<R: rand::Rng>(rng: &mut R, _env: &SimulatorEnv) -> Interactions {
let create_query = Interaction::Query(Query::Create(Create::arbitrary(rng)));
Interactions(vec![create_query])
Interactions::Query(Query::Create(Create::arbitrary(rng)))
}
fn random_read<R: rand::Rng>(rng: &mut R, env: &SimulatorEnv) -> Interactions {
let select_query = Interaction::Query(Query::Select(Select::arbitrary_from(rng, &env.tables)));
Interactions(vec![select_query])
Interactions::Query(Query::Select(Select::arbitrary_from(rng, &env.tables)))
}
fn random_write<R: rand::Rng>(rng: &mut R, env: &SimulatorEnv) -> Interactions {
let table = pick(&env.tables, rng);
let insert_query = Interaction::Query(Query::Insert(Insert::arbitrary_from(rng, table)));
Interactions(vec![insert_query])
let insert_query = Query::Insert(Insert::arbitrary_from(rng, table));
Interactions::Query(insert_query)
}
fn random_fault<R: rand::Rng>(_rng: &mut R, _env: &SimulatorEnv) -> Interactions {
let fault = Interaction::Fault(Fault::Disconnect);
Interactions(vec![fault])
Interactions::Fault(Fault::Disconnect)
}
impl ArbitraryFrom<(&SimulatorEnv, InteractionStats)> for Interactions {
fn arbitrary_from<R: rand::Rng>(
rng: &mut R,
(env, stats): &(&SimulatorEnv, InteractionStats),
(env, stats): (&SimulatorEnv, InteractionStats),
) -> Self {
let remaining_read = ((env.opts.max_interactions as f64 * env.opts.read_percent / 100.0)
- (stats.read_count as f64))
.max(0.0);
let remaining_write = ((env.opts.max_interactions as f64 * env.opts.write_percent / 100.0)
- (stats.write_count as f64))
.max(0.0);
let remaining_create = ((env.opts.max_interactions as f64 * env.opts.create_percent
/ 100.0)
- (stats.create_count as f64))
.max(0.0);
let remaining_ = remaining(env, &stats);
frequency(
vec![
(
f64::min(remaining_read, remaining_write),
Box::new(|rng: &mut R| property_insert_select(rng, env)),
f64::min(remaining_.read, remaining_.write) + remaining_.create,
Box::new(|rng: &mut R| {
Interactions::Property(Property::arbitrary_from(rng, (env, &stats)))
}),
),
(
remaining_read,
remaining_.read,
Box::new(|rng: &mut R| random_read(rng, env)),
),
(
remaining_write,
remaining_.write,
Box::new(|rng: &mut R| random_write(rng, env)),
),
(
remaining_create,
remaining_.create,
Box::new(|rng: &mut R| create_table(rng, env)),
),
(1.0, Box::new(|rng: &mut R| random_fault(rng, env))),
(
remaining_create / 2.0,
Box::new(|rng: &mut R| property_double_create_failure(rng, env)),
remaining_
.read
.min(remaining_.write)
.min(remaining_.create)
.max(1.0),
Box::new(|rng: &mut R| random_fault(rng, env)),
),
],
rng,

View File

@@ -0,0 +1,319 @@
use limbo_core::LimboError;
use crate::{
model::{
query::{Create, Delete, Insert, Predicate, Query, Select},
table::Value,
},
runner::env::SimulatorEnv,
};
use super::{
frequency, pick, pick_index,
plan::{Assertion, Interaction, InteractionStats, ResultSet},
ArbitraryFrom,
};
/// Properties are representations of executable specifications
/// about the database behavior.
#[derive(Clone)]
pub(crate) enum Property {
    /// Insert-Select is a property in which the inserted row
    /// must be in the resulting rows of a select query that has a
    /// where clause that matches the inserted row.
    /// The execution of the property is as follows
    ///     INSERT INTO <t> VALUES (...)
    ///     I_0
    ///     I_1
    ///     ...
    ///     I_n
    ///     SELECT * FROM <t> WHERE <predicate>
    /// The interactions in the middle have the following constraints;
    /// - There will be no errors in the middle interactions.
    /// - The inserted row will not be deleted.
    /// - The inserted row will not be updated.
    /// - The table `t` will not be renamed, dropped, or altered.
    InsertSelect {
        /// The insert query
        insert: Insert,
        /// Index into `insert.values` of the row that the final select must find
        row_index: usize,
        /// Additional interactions in the middle of the property
        queries: Vec<Query>,
        /// The select query
        select: Select,
    },
    /// Double Create Failure is a property in which creating
    /// the same table twice leads to an error.
    /// The execution of the property is as follows
    ///     CREATE TABLE <t> (...)
    ///     I_0
    ///     I_1
    ///     ...
    ///     I_n
    ///     CREATE TABLE <t> (...) -> Error
    /// The interactions in the middle have the following constraints;
    /// - There will be no errors in the middle interactions.
    /// - Table `t` will not be renamed or dropped.
    DoubleCreateFailure {
        /// The create query
        create: Create,
        /// Additional interactions in the middle of the property
        queries: Vec<Query>,
    },
}
impl Property {
    /// Human-readable name of the property, used in reports and logs.
    pub(crate) fn name(&self) -> String {
        let label = match self {
            Property::InsertSelect { .. } => "Insert-Select",
            Property::DoubleCreateFailure { .. } => "Double-Create-Failure",
        };
        label.to_string()
    }
    /// Lower this property into an executable list of interactions.
    /// Properties are kept as plain data so they can be serialized;
    /// `Interaction` carries closures and cannot be serialized directly.
    pub(crate) fn interactions(&self) -> Vec<Interaction> {
        match self {
            Property::InsertSelect {
                insert,
                row_index,
                queries,
                select,
            } => {
                // The insert must carry at least one row for `row_index` to refer to.
                assert!(
                    !insert.values.is_empty(),
                    "insert query should have at least 1 value"
                );
                // The row we expect the final select to find.
                let row = insert.values[*row_index].clone();
                // Precondition: the target table already exists in the environment.
                let table_name = insert.table.clone();
                let assumption = Interaction::Assumption(Assertion {
                    message: format!("table {} exists", insert.table),
                    func: Box::new(move |_: &Vec<ResultSet>, env: &SimulatorEnv| {
                        Ok(env.tables.iter().any(|t| t.name == table_name))
                    }),
                });
                // Postcondition: the inserted row appears in the select result.
                let assertion = Interaction::Assertion(Assertion {
                    // TODO: include the matched column/value ("{} = {}") in the message.
                    message: format!(
                        "row [{:?}] not found in table {}",
                        row.iter().map(|v| v.to_string()).collect::<Vec<String>>(),
                        insert.table,
                    ),
                    func: Box::new(move |stack: &Vec<ResultSet>, _: &SimulatorEnv| {
                        match stack.last().unwrap() {
                            Ok(found) => Ok(found.iter().any(|r| r == &row)),
                            Err(err) => Err(LimboError::InternalError(err.to_string())),
                        }
                    }),
                });
                // assumption, INSERT, middle queries, SELECT, assertion — in that order.
                std::iter::once(assumption)
                    .chain(std::iter::once(Interaction::Query(Query::Insert(
                        insert.clone(),
                    ))))
                    .chain(queries.clone().into_iter().map(Interaction::Query))
                    .chain(std::iter::once(Interaction::Query(Query::Select(
                        select.clone(),
                    ))))
                    .chain(std::iter::once(assertion))
                    .collect()
            }
            Property::DoubleCreateFailure { create, queries } => {
                // Precondition: the table does not exist yet.
                let table_name = create.table.name.clone();
                let assumption = Interaction::Assumption(Assertion {
                    message: "Double-Create-Failure should not be called on an existing table"
                        .to_string(),
                    func: Box::new(move |_: &Vec<ResultSet>, env: &SimulatorEnv| {
                        Ok(!env.tables.iter().any(|t| t.name == table_name))
                    }),
                });
                let first_create = Interaction::Query(Query::Create(create.clone()));
                let second_create = Interaction::Query(Query::Create(create.clone()));
                // Postcondition: the second CREATE fails with an "already exists" error.
                let table_name = create.table.name.clone();
                let assertion = Interaction::Assertion(Assertion {
                    message:
                        "creating two tables with the name should result in a failure for the second query"
                            .to_string(),
                    func: Box::new(move |stack: &Vec<ResultSet>, _: &SimulatorEnv| {
                        match stack.last().unwrap() {
                            Ok(_) => Ok(false),
                            Err(e) => Ok(e
                                .to_string()
                                .contains(&format!("Table {table_name} already exists"))),
                        }
                    }),
                });
                // assumption, CREATE, middle queries, CREATE again, assertion.
                std::iter::once(assumption)
                    .chain(std::iter::once(first_create))
                    .chain(queries.clone().into_iter().map(Interaction::Query))
                    .chain(std::iter::once(second_create))
                    .chain(std::iter::once(assertion))
                    .collect()
            }
        }
    }
}
/// Fractional per-category budget of interactions still allowed, computed by
/// `remaining` as (configured percentage of `max_interactions`) minus the
/// interactions already generated, clamped at zero.
pub(crate) struct Remaining {
    // Budget left for SELECT-generating interactions.
    pub(crate) read: f64,
    // Budget left for INSERT-generating interactions.
    pub(crate) write: f64,
    // Budget left for CREATE TABLE-generating interactions.
    pub(crate) create: f64,
}
/// Compute how much of each query category's budget is still unspent.
///
/// A category's total budget is `max_interactions * percent / 100`; the
/// remainder is that total minus the interactions already generated for the
/// category, floored at zero so it can be used directly as a weight.
pub(crate) fn remaining(env: &SimulatorEnv, stats: &InteractionStats) -> Remaining {
    let total = env.opts.max_interactions as f64;
    // Single formula shared by all three categories (was duplicated per field).
    let budget = |percent: f64, used: f64| (total * percent / 100.0 - used).max(0.0);
    Remaining {
        read: budget(env.opts.read_percent, stats.read_count as f64),
        write: budget(env.opts.write_percent, stats.write_count as f64),
        create: budget(env.opts.create_percent, stats.create_count as f64),
    }
}
/// Build an Insert-Select property: insert a batch of rows into a random
/// table, run some benign queries in between, then select one of the
/// inserted rows back and assert it is found.
fn property_insert_select<R: rand::Rng>(
    rng: &mut R,
    env: &SimulatorEnv,
    remaining: &Remaining,
) -> Property {
    // Get a random table
    let table = pick(&env.tables, rng);
    // Generate 1..=5 rows to insert
    let rows = (0..rng.gen_range(1..=5))
        .map(|_| Vec::<Value>::arbitrary_from(rng, table))
        .collect::<Vec<_>>();
    // Pick a random row to select back later.
    // (fix: `pick_index` returns a `usize`, which is `Copy` — the previous
    // `.clone()` was redundant, clippy::clone_on_copy)
    let row_index = pick_index(rows.len(), rng);
    let row = rows[row_index].clone();
    // Insert the rows
    let insert_query = Insert {
        table: table.name.clone(),
        values: rows,
    };
    // Create random queries respecting the constraints:
    // - [x] There will be no errors in the middle interactions. (best effort; impossible to fully check)
    // - [x] The inserted row will not be deleted.
    // - [ ] The inserted row will not be updated. (todo: add this constraint once UPDATE is implemented)
    // - [ ] The table `t` will not be renamed, dropped, or altered. (todo: add this constraint once ALTER or DROP is implemented)
    let mut queries = Vec::new();
    for _ in 0..rng.gen_range(0..3) {
        let query = Query::arbitrary_from(rng, (table, remaining));
        match &query {
            Query::Delete(Delete {
                table: t,
                predicate,
            }) => {
                // Skip deletes that would remove the row we plan to select.
                if t == &table.name && predicate.test(&row, &table) {
                    continue;
                }
            }
            Query::Create(Create { table: t }) => {
                // Re-creating the same table mid-property would be an error.
                if t.name == table.name {
                    continue;
                }
            }
            _ => (),
        }
        queries.push(query);
    }
    // Select the row
    let select_query = Select {
        table: table.name.clone(),
        predicate: Predicate::arbitrary_from(rng, (table, &row)),
    };
    Property::InsertSelect {
        insert: insert_query,
        row_index,
        queries,
        select: select_query,
    }
}
/// Build a Double-Create-Failure property: create a table, run some benign
/// queries in between, then create the same table again and assert that the
/// second CREATE fails.
fn property_double_create_failure<R: rand::Rng>(
    rng: &mut R,
    env: &SimulatorEnv,
    remaining: &Remaining,
) -> Property {
    // Get a random table
    let table = pick(&env.tables, rng);
    // Create the table
    let create_query = Create {
        table: table.clone(),
    };
    // Create random queries respecting the constraints;
    // - [x] There will be no errors in the middle interactions. (best effort)
    // - [ ] Table `t` will not be renamed or dropped. (todo: add this constraint once ALTER or DROP is implemented)
    let mut queries = Vec::new();
    for _ in 0..rng.gen_range(0..3) {
        let query = Query::arbitrary_from(rng, (table, remaining));
        // Only a CREATE of the same table must be filtered out — an `if let`
        // replaces the previous single-arm `match` (clippy::single_match).
        if let Query::Create(Create { table: t }) = &query {
            if t.name == table.name {
                continue;
            }
        }
        queries.push(query);
    }
    Property::DoubleCreateFailure {
        create: create_query,
        queries,
    }
}
impl ArbitraryFrom<(&SimulatorEnv, &InteractionStats)> for Property {
    /// Pick a property at random, weighted by the remaining interaction budget:
    /// Insert-Select needs both reads and writes, Double-Create-Failure is
    /// weighted by half the remaining create budget.
    fn arbitrary_from<R: rand::Rng>(
        rng: &mut R,
        (env, stats): (&SimulatorEnv, &InteractionStats),
    ) -> Self {
        let budget = remaining(env, stats);
        frequency(
            vec![
                (
                    budget.read.min(budget.write),
                    Box::new(|rng: &mut R| property_insert_select(rng, env, &budget)),
                ),
                (
                    budget.create / 2.0,
                    Box::new(|rng: &mut R| property_double_create_failure(rng, env, &budget)),
                ),
            ],
            rng,
        )
    }
}

View File

@@ -3,8 +3,10 @@ use crate::generation::{one_of, Arbitrary, ArbitraryFrom};
use crate::model::query::{Create, Delete, Insert, Predicate, Query, Select};
use crate::model::table::{Table, Value};
use rand::seq::SliceRandom as _;
use rand::Rng;
use super::property::Remaining;
use super::{frequency, pick};
impl Arbitrary for Create {
@@ -15,7 +17,7 @@ impl Arbitrary for Create {
}
}
impl ArbitraryFrom<Vec<Table>> for Select {
impl ArbitraryFrom<&Vec<Table>> for Select {
fn arbitrary_from<R: Rng>(rng: &mut R, tables: &Vec<Table>) -> Self {
let table = pick(tables, rng);
Self {
@@ -25,7 +27,7 @@ impl ArbitraryFrom<Vec<Table>> for Select {
}
}
impl ArbitraryFrom<Vec<&Table>> for Select {
impl ArbitraryFrom<&Vec<&Table>> for Select {
fn arbitrary_from<R: Rng>(rng: &mut R, tables: &Vec<&Table>) -> Self {
let table = pick(tables, rng);
Self {
@@ -35,7 +37,7 @@ impl ArbitraryFrom<Vec<&Table>> for Select {
}
}
impl ArbitraryFrom<Table> for Insert {
impl ArbitraryFrom<&Table> for Insert {
fn arbitrary_from<R: Rng>(rng: &mut R, table: &Table) -> Self {
let num_rows = rng.gen_range(1..10);
let values: Vec<Vec<Value>> = (0..num_rows)
@@ -54,7 +56,7 @@ impl ArbitraryFrom<Table> for Insert {
}
}
impl ArbitraryFrom<Table> for Delete {
impl ArbitraryFrom<&Table> for Delete {
fn arbitrary_from<R: Rng>(rng: &mut R, table: &Table) -> Self {
Self {
table: table.name.clone(),
@@ -63,7 +65,7 @@ impl ArbitraryFrom<Table> for Delete {
}
}
impl ArbitraryFrom<Table> for Query {
impl ArbitraryFrom<&Table> for Query {
fn arbitrary_from<R: Rng>(rng: &mut R, table: &Table) -> Self {
frequency(
vec![
@@ -86,11 +88,37 @@ impl ArbitraryFrom<Table> for Query {
}
}
// Generate a random query against `table`, weighted by the remaining
// per-category budget (see `Remaining`) so plan generation keeps the
// configured read/write/create mix.
impl ArbitraryFrom<(&Table, &Remaining)> for Query {
    fn arbitrary_from<R: Rng>(rng: &mut R, (table, remaining): (&Table, &Remaining)) -> Self {
        frequency(
            vec![
                (
                    remaining.create,
                    Box::new(|rng| Self::Create(Create::arbitrary(rng))),
                ),
                (
                    remaining.read,
                    Box::new(|rng| Self::Select(Select::arbitrary_from(rng, &vec![table]))),
                ),
                (
                    remaining.write,
                    Box::new(|rng| Self::Insert(Insert::arbitrary_from(rng, table))),
                ),
                (
                    // NOTE(review): the 0.0 weight keeps DELETE effectively disabled,
                    // presumably until properties can account for deletions — confirm intent.
                    0.0,
                    Box::new(|rng| Self::Delete(Delete::arbitrary_from(rng, table))),
                ),
            ],
            rng,
        )
    }
}
struct CompoundPredicate(Predicate);
struct SimplePredicate(Predicate);
impl ArbitraryFrom<(&Table, bool)> for SimplePredicate {
fn arbitrary_from<R: Rng>(rng: &mut R, (table, predicate_value): &(&Table, bool)) -> Self {
fn arbitrary_from<R: Rng>(rng: &mut R, (table, predicate_value): (&Table, bool)) -> Self {
// Pick a random column
let column_index = rng.gen_range(0..table.columns.len());
let column = &table.columns[column_index];
@@ -154,61 +182,61 @@ impl ArbitraryFrom<(&Table, bool)> for SimplePredicate {
}
impl ArbitraryFrom<(&Table, bool)> for CompoundPredicate {
fn arbitrary_from<R: Rng>(rng: &mut R, (table, predicate_value): &(&Table, bool)) -> Self {
fn arbitrary_from<R: Rng>(rng: &mut R, (table, predicate_value): (&Table, bool)) -> Self {
// Decide if you want to create an AND or an OR
Self(if rng.gen_bool(0.7) {
// An AND for true requires each of its children to be true
// An AND for false requires at least one of its children to be false
if *predicate_value {
if predicate_value {
Predicate::And(
(0..rng.gen_range(1..=3))
.map(|_| SimplePredicate::arbitrary_from(rng, &(*table, true)).0)
(0..rng.gen_range(0..=3))
.map(|_| SimplePredicate::arbitrary_from(rng, (table, true)).0)
.collect(),
)
} else {
// Create a vector of random booleans
let mut booleans = (0..rng.gen_range(1..=3))
let mut booleans = (0..rng.gen_range(0..=3))
.map(|_| rng.gen_bool(0.5))
.collect::<Vec<_>>();
let len = booleans.len();
// Make sure at least one of them is false
if booleans.iter().all(|b| *b) {
if !booleans.is_empty() && booleans.iter().all(|b| *b) {
booleans[rng.gen_range(0..len)] = false;
}
Predicate::And(
booleans
.iter()
.map(|b| SimplePredicate::arbitrary_from(rng, &(*table, *b)).0)
.map(|b| SimplePredicate::arbitrary_from(rng, (table, *b)).0)
.collect(),
)
}
} else {
// An OR for true requires at least one of its children to be true
// An OR for false requires each of its children to be false
if *predicate_value {
if predicate_value {
// Create a vector of random booleans
let mut booleans = (0..rng.gen_range(1..=3))
let mut booleans = (0..rng.gen_range(0..=3))
.map(|_| rng.gen_bool(0.5))
.collect::<Vec<_>>();
let len = booleans.len();
// Make sure at least one of them is true
if booleans.iter().all(|b| !*b) {
if !booleans.is_empty() && booleans.iter().all(|b| !*b) {
booleans[rng.gen_range(0..len)] = true;
}
Predicate::Or(
booleans
.iter()
.map(|b| SimplePredicate::arbitrary_from(rng, &(*table, *b)).0)
.map(|b| SimplePredicate::arbitrary_from(rng, (table, *b)).0)
.collect(),
)
} else {
Predicate::Or(
(0..rng.gen_range(1..=3))
.map(|_| SimplePredicate::arbitrary_from(rng, &(*table, false)).0)
(0..rng.gen_range(0..=3))
.map(|_| SimplePredicate::arbitrary_from(rng, (table, false)).0)
.collect(),
)
}
@@ -216,28 +244,28 @@ impl ArbitraryFrom<(&Table, bool)> for CompoundPredicate {
}
}
impl ArbitraryFrom<Table> for Predicate {
impl ArbitraryFrom<&Table> for Predicate {
fn arbitrary_from<R: Rng>(rng: &mut R, table: &Table) -> Self {
let predicate_value = rng.gen_bool(0.5);
CompoundPredicate::arbitrary_from(rng, &(table, predicate_value)).0
CompoundPredicate::arbitrary_from(rng, (table, predicate_value)).0
}
}
impl ArbitraryFrom<(&str, &Value)> for Predicate {
fn arbitrary_from<R: Rng>(rng: &mut R, (column_name, value): &(&str, &Value)) -> Self {
fn arbitrary_from<R: Rng>(rng: &mut R, (column_name, value): (&str, &Value)) -> Self {
one_of(
vec![
Box::new(|_| Predicate::Eq(column_name.to_string(), (*value).clone())),
Box::new(|rng| {
Self::Gt(
column_name.to_string(),
GTValue::arbitrary_from(rng, *value).0,
GTValue::arbitrary_from(rng, value).0,
)
}),
Box::new(|rng| {
Self::Lt(
column_name.to_string(),
LTValue::arbitrary_from(rng, *value).0,
LTValue::arbitrary_from(rng, value).0,
)
}),
],
@@ -245,3 +273,155 @@ impl ArbitraryFrom<(&str, &Value)> for Predicate {
)
}
}
/// Produces a predicate that is true for the provided row in the given table
fn produce_true_predicate<R: Rng>(rng: &mut R, (t, row): (&Table, &Vec<Value>)) -> Predicate {
    // Anchor the predicate on one randomly chosen column of the row.
    let idx = rng.gen_range(0..t.columns.len());
    let column = &t.columns[idx];
    let value = &row[idx];
    one_of(
        vec![
            // column == its actual value
            Box::new(|_| Predicate::Eq(column.name.clone(), value.clone())),
            // column != some other value (rejection-sampled until it differs)
            Box::new(|rng| {
                let mut candidate = Value::arbitrary_from(rng, &column.column_type);
                while &candidate == value {
                    candidate = Value::arbitrary_from(rng, &column.column_type);
                }
                Predicate::Neq(column.name.clone(), candidate)
            }),
            // column > something smaller than its value
            Box::new(|rng| {
                Predicate::Gt(column.name.clone(), LTValue::arbitrary_from(rng, value).0)
            }),
            // column < something larger than its value
            Box::new(|rng| {
                Predicate::Lt(column.name.clone(), GTValue::arbitrary_from(rng, value).0)
            }),
        ],
        rng,
    )
}
/// Produces a predicate that is false for the provided row in the given table
fn produce_false_predicate<R: Rng>(rng: &mut R, (t, row): (&Table, &Vec<Value>)) -> Predicate {
    // Anchor the predicate on one randomly chosen column of the row.
    let idx = rng.gen_range(0..t.columns.len());
    let column = &t.columns[idx];
    let value = &row[idx];
    one_of(
        vec![
            // column != its actual value
            Box::new(|_| Predicate::Neq(column.name.clone(), value.clone())),
            // column == some other value (rejection-sampled until it differs)
            Box::new(|rng| {
                let mut candidate = Value::arbitrary_from(rng, &column.column_type);
                while &candidate == value {
                    candidate = Value::arbitrary_from(rng, &column.column_type);
                }
                Predicate::Eq(column.name.clone(), candidate)
            }),
            // column > something larger than its value
            Box::new(|rng| {
                Predicate::Gt(column.name.clone(), GTValue::arbitrary_from(rng, value).0)
            }),
            // column < something smaller than its value
            Box::new(|rng| {
                Predicate::Lt(column.name.clone(), LTValue::arbitrary_from(rng, value).0)
            }),
        ],
        rng,
    )
}
impl ArbitraryFrom<(&Table, &Vec<Value>)> for Predicate {
    /// Build a compound predicate that is guaranteed to be true for `row`,
    /// by generating individually true and false sub-predicates and combining
    /// them in ways that preserve overall truth (T or X, T and T, ...).
    fn arbitrary_from<R: Rng>(rng: &mut R, (t, row): (&Table, &Vec<Value>)) -> Self {
        // We want to produce a predicate that is true for the row.
        // We can do this by creating several predicates, some that
        // are true, some that are false, and combining them in ways that
        // correspond to the creation of a true predicate.
        // Produce some true and false predicates
        let mut true_predicates = (1..=rng.gen_range(1..=4))
            .map(|_| produce_true_predicate(rng, (t, row)))
            .collect::<Vec<_>>();
        // NOTE(review): `0..=k` is inclusive, so this produces 1..=4 false
        // predicates even when k == 0 — confirm the off-by-one is intended.
        let false_predicates = (0..=rng.gen_range(0..=3))
            .map(|_| produce_false_predicate(rng, (t, row)))
            .collect::<Vec<_>>();
        // Start building a top level predicate from a true predicate
        let mut result = true_predicates.pop().unwrap();
        // Tag each remaining predicate with its truth value and shuffle them.
        let mut predicates = true_predicates
            .iter()
            .map(|p| (true, p.clone()))
            .chain(false_predicates.iter().map(|p| (false, p.clone())))
            .collect::<Vec<_>>();
        predicates.shuffle(rng);
        while !predicates.is_empty() {
            // Create a new predicate from at most 3 of the remaining predicates.
            // NOTE(review): the lower bound is 0, so `context` can be empty and
            // this iteration merely re-wraps `result` — the original comment
            // claimed "at least 1"; confirm which is intended.
            let context =
                predicates[0..rng.gen_range(0..=usize::min(3, predicates.len()))].to_vec();
            // Shift `predicates` to remove the predicates in the context
            predicates = predicates[context.len()..].to_vec();
            // `result` is true, so we have the following three options to make a true predicate:
            // T or F
            // T or T
            // T and T
            result = one_of(
                vec![
                    // T or (X1 or X2 or ... or Xn)
                    Box::new(|_| {
                        Predicate::Or(vec![
                            result.clone(),
                            Predicate::Or(context.iter().map(|(_, p)| p.clone()).collect()),
                        ])
                    }),
                    // T or (X1 and X2 and ... and Xn)
                    Box::new(|_| {
                        Predicate::Or(vec![
                            result.clone(),
                            Predicate::And(context.iter().map(|(_, p)| p.clone()).collect()),
                        ])
                    }),
                    // T and T
                    Box::new(|_| {
                        // Check if all the predicates in the context are true
                        if context.iter().all(|(b, _)| *b) {
                            // T and (T1 and T2 and ... and Tn)
                            Predicate::And(vec![
                                result.clone(),
                                Predicate::And(context.iter().map(|(_, p)| p.clone()).collect()),
                            ])
                        }
                        // Check if there is at least one true predicate
                        else if context.iter().any(|(b, _)| *b) {
                            // T and (X1 or X2 or ... or Xn)
                            Predicate::And(vec![
                                result.clone(),
                                Predicate::Or(context.iter().map(|(_, p)| p.clone()).collect()),
                            ])
                        } else {
                            // T and (X1 or X2 or ... or Xn or TRUE)
                            Predicate::And(vec![
                                result.clone(),
                                Predicate::Or(
                                    context
                                        .iter()
                                        .map(|(_, p)| p.clone())
                                        .chain(std::iter::once(Predicate::true_()))
                                        .collect(),
                                ),
                            ])
                        }
                    }),
                ],
                rng,
            );
        }
        result
    }
}

View File

@@ -1,8 +1,6 @@
use rand::Rng;
use crate::generation::{
gen_random_text, pick, pick_index, readable_name_custom, Arbitrary, ArbitraryFrom,
};
use crate::generation::{gen_random_text, pick, readable_name_custom, Arbitrary, ArbitraryFrom};
use crate::model::table::{Column, ColumnType, Name, Table, Value};
impl Arbitrary for Name {
@@ -15,7 +13,7 @@ impl Arbitrary for Name {
impl Arbitrary for Table {
fn arbitrary<R: Rng>(rng: &mut R) -> Self {
let name = Name::arbitrary(rng).0;
let columns = (1..=rng.gen_range(1..5))
let columns = (1..=rng.gen_range(1..10))
.map(|_| Column::arbitrary(rng))
.collect();
Table {
@@ -45,7 +43,18 @@ impl Arbitrary for ColumnType {
}
}
impl ArbitraryFrom<Vec<&Value>> for Value {
impl ArbitraryFrom<&Table> for Vec<Value> {
    /// Generate one full row for `table`: one arbitrary value per column,
    /// in column order.
    fn arbitrary_from<R: rand::Rng>(rng: &mut R, table: &Table) -> Self {
        table
            .columns
            .iter()
            .map(|column| Value::arbitrary_from(rng, &column.column_type))
            .collect()
    }
}
impl ArbitraryFrom<&Vec<&Value>> for Value {
fn arbitrary_from<R: Rng>(rng: &mut R, values: &Vec<&Self>) -> Self {
if values.is_empty() {
return Self::Null;
@@ -55,7 +64,7 @@ impl ArbitraryFrom<Vec<&Value>> for Value {
}
}
impl ArbitraryFrom<ColumnType> for Value {
impl ArbitraryFrom<&ColumnType> for Value {
fn arbitrary_from<R: Rng>(rng: &mut R, column_type: &ColumnType) -> Self {
match column_type {
ColumnType::Integer => Self::Integer(rng.gen_range(i64::MIN..i64::MAX)),
@@ -68,22 +77,22 @@ impl ArbitraryFrom<ColumnType> for Value {
pub(crate) struct LTValue(pub(crate) Value);
impl ArbitraryFrom<Vec<&Value>> for LTValue {
impl ArbitraryFrom<&Vec<&Value>> for LTValue {
fn arbitrary_from<R: Rng>(rng: &mut R, values: &Vec<&Value>) -> Self {
if values.is_empty() {
return Self(Value::Null);
}
let index = pick_index(values.len(), rng);
Self::arbitrary_from(rng, values[index])
let value = pick(values, rng);
Self::arbitrary_from(rng, *value)
}
}
impl ArbitraryFrom<Value> for LTValue {
impl ArbitraryFrom<&Value> for LTValue {
fn arbitrary_from<R: Rng>(rng: &mut R, value: &Value) -> Self {
match value {
Value::Integer(i) => Self(Value::Integer(rng.gen_range(i64::MIN..*i - 1))),
Value::Float(f) => Self(Value::Float(rng.gen_range(-1e10..*f - 1.0))),
Value::Float(f) => Self(Value::Float(f - rng.gen_range(0.0..1e10))),
Value::Text(t) => {
// Either shorten the string, or make at least one character smaller and mutate the rest
let mut t = t.clone();
@@ -128,18 +137,18 @@ impl ArbitraryFrom<Value> for LTValue {
pub(crate) struct GTValue(pub(crate) Value);
impl ArbitraryFrom<Vec<&Value>> for GTValue {
impl ArbitraryFrom<&Vec<&Value>> for GTValue {
fn arbitrary_from<R: Rng>(rng: &mut R, values: &Vec<&Value>) -> Self {
if values.is_empty() {
return Self(Value::Null);
}
let index = pick_index(values.len(), rng);
Self::arbitrary_from(rng, values[index])
let value = pick(values, rng);
Self::arbitrary_from(rng, *value)
}
}
impl ArbitraryFrom<Value> for GTValue {
impl ArbitraryFrom<&Value> for GTValue {
fn arbitrary_from<R: Rng>(rng: &mut R, value: &Value) -> Self {
match value {
Value::Integer(i) => Self(Value::Integer(rng.gen_range(*i..i64::MAX))),

View File

@@ -1,28 +1,68 @@
#![allow(clippy::arc_with_non_send_sync, dead_code)]
use clap::Parser;
use generation::plan::{Interaction, InteractionPlan, ResultSet};
use generation::{pick_index, ArbitraryFrom};
use limbo_core::{Database, Result};
use model::table::Value;
use core::panic;
use generation::plan::{InteractionPlan, InteractionPlanState};
use generation::ArbitraryFrom;
use limbo_core::Database;
use rand::prelude::*;
use rand_chacha::ChaCha8Rng;
use runner::cli::SimulatorCLI;
use runner::env::{SimConnection, SimulatorEnv, SimulatorOpts};
use runner::execution::{execute_plans, Execution, ExecutionHistory, ExecutionResult};
use runner::io::SimulatorIO;
use std::any::Any;
use std::backtrace::Backtrace;
use std::io::Write;
use std::path::Path;
use std::sync::Arc;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use tempfile::TempDir;
mod generation;
mod model;
mod runner;
mod shrink;
/// Filesystem locations of every artifact a simulator run may produce.
struct Paths {
    // Main database file exercised by the run.
    db: PathBuf,
    // Serialized interaction plan.
    plan: PathBuf,
    // Plan written after shrinking a failing run.
    shrunk_plan: PathBuf,
    // Execution history log — presumably one entry per executed interaction; confirm against writer.
    history: PathBuf,
    // Database used when re-running the plan in doublecheck mode.
    doublecheck_db: PathBuf,
    // Database used when executing the shrunk plan.
    shrunk_db: PathBuf,
}
fn main() {
let _ = env_logger::try_init();
impl Paths {
    /// Derive all artifact paths under `output_dir` and log where each will be
    /// written, so a failing run is easy to inspect afterwards.
    fn new(output_dir: &Path, shrink: bool, doublecheck: bool) -> Self {
        let artifact = |name: &str| PathBuf::from(output_dir).join(name);
        let paths = Paths {
            db: artifact("simulator.db"),
            plan: artifact("simulator.plan"),
            shrunk_plan: artifact("simulator_shrunk.plan"),
            history: artifact("simulator.history"),
            doublecheck_db: artifact("simulator_double.db"),
            shrunk_db: artifact("simulator_shrunk.db"),
        };
        // Print the seed, the locations of the database and the plan file
        log::info!("database path: {:?}", paths.db);
        if doublecheck {
            log::info!("doublecheck database path: {:?}", paths.doublecheck_db);
        } else if shrink {
            log::info!("shrunk database path: {:?}", paths.shrunk_db);
        }
        log::info!("simulator plan path: {:?}", paths.plan);
        if shrink {
            log::info!("shrunk plan path: {:?}", paths.shrunk_plan);
        }
        log::info!("simulator history path: {:?}", paths.history);
        paths
    }
}
fn main() -> Result<(), String> {
init_logger();
let cli_opts = SimulatorCLI::parse();
cli_opts.validate()?;
let seed = match cli_opts.seed {
Some(seed) => seed,
@@ -31,17 +71,16 @@ fn main() {
let output_dir = match &cli_opts.output_dir {
Some(dir) => Path::new(dir).to_path_buf(),
None => TempDir::new().unwrap().into_path(),
None => TempDir::new().map_err(|e| format!("{:?}", e))?.into_path(),
};
let db_path = output_dir.join("simulator.db");
let plan_path = output_dir.join("simulator.plan");
banner();
let paths = Paths::new(&output_dir, cli_opts.shrink, cli_opts.doublecheck);
// Print the seed, the locations of the database and the plan file
log::info!("database path: {:?}", db_path);
log::info!("simulator plan path: {:?}", plan_path);
log::info!("seed: {}", seed);
let last_execution = Arc::new(Mutex::new(Execution::new(0, 0, 0)));
std::panic::set_hook(Box::new(move |info| {
log::error!("panic occurred");
@@ -58,83 +97,252 @@ fn main() {
log::error!("captured backtrace:\n{}", bt);
}));
let result = std::panic::catch_unwind(|| run_simulation(seed, &cli_opts, &db_path, &plan_path));
let result = SandboxedResult::from(
std::panic::catch_unwind(|| {
run_simulation(
seed,
&cli_opts,
&paths.db,
&paths.plan,
last_execution.clone(),
None,
)
}),
last_execution.clone(),
);
if cli_opts.doublecheck {
// Move the old database and plan file to a new location
let old_db_path = db_path.with_extension("_old.db");
let old_plan_path = plan_path.with_extension("_old.plan");
std::fs::rename(&db_path, &old_db_path).unwrap();
std::fs::rename(&plan_path, &old_plan_path).unwrap();
// Run the simulation again
let result2 =
std::panic::catch_unwind(|| run_simulation(seed, &cli_opts, &db_path, &plan_path));
let result2 = SandboxedResult::from(
std::panic::catch_unwind(|| {
run_simulation(
seed,
&cli_opts,
&paths.doublecheck_db,
&paths.plan,
last_execution.clone(),
None,
)
}),
last_execution.clone(),
);
match (result, result2) {
(Ok(Ok(_)), Err(_)) => {
(SandboxedResult::Correct, SandboxedResult::Panicked { .. }) => {
log::error!("doublecheck failed! first run succeeded, but second run panicked.");
}
(Ok(Err(_)), Err(_)) => {
(SandboxedResult::FoundBug { .. }, SandboxedResult::Panicked { .. }) => {
log::error!(
"doublecheck failed! first run failed assertion, but second run panicked."
"doublecheck failed! first run failed an assertion, but second run panicked."
);
}
(Err(_), Ok(Ok(_))) => {
(SandboxedResult::Panicked { .. }, SandboxedResult::Correct) => {
log::error!("doublecheck failed! first run panicked, but second run succeeded.");
}
(Err(_), Ok(Err(_))) => {
(SandboxedResult::Panicked { .. }, SandboxedResult::FoundBug { .. }) => {
log::error!(
"doublecheck failed! first run panicked, but second run failed assertion."
"doublecheck failed! first run panicked, but second run failed an assertion."
);
}
(Ok(Ok(_)), Ok(Err(_))) => {
(SandboxedResult::Correct, SandboxedResult::FoundBug { .. }) => {
log::error!(
"doublecheck failed! first run succeeded, but second run failed assertion."
"doublecheck failed! first run succeeded, but second run failed an assertion."
);
}
(Ok(Err(_)), Ok(Ok(_))) => {
(SandboxedResult::FoundBug { .. }, SandboxedResult::Correct) => {
log::error!(
"doublecheck failed! first run failed assertion, but second run succeeded."
"doublecheck failed! first run failed an assertion, but second run succeeded."
);
}
(Err(_), Err(_)) | (Ok(_), Ok(_)) => {
(SandboxedResult::Correct, SandboxedResult::Correct)
| (SandboxedResult::FoundBug { .. }, SandboxedResult::FoundBug { .. })
| (SandboxedResult::Panicked { .. }, SandboxedResult::Panicked { .. }) => {
// Compare the two database files byte by byte
let old_db = std::fs::read(&old_db_path).unwrap();
let new_db = std::fs::read(&db_path).unwrap();
if old_db != new_db {
let db_bytes = std::fs::read(&paths.db).unwrap();
let doublecheck_db_bytes = std::fs::read(&paths.doublecheck_db).unwrap();
if db_bytes != doublecheck_db_bytes {
log::error!("doublecheck failed! database files are different.");
} else {
log::info!("doublecheck succeeded! database files are the same.");
}
}
}
// Move the new database and plan file to a new location
let new_db_path = db_path.with_extension("_double.db");
let new_plan_path = plan_path.with_extension("_double.plan");
std::fs::rename(&db_path, &new_db_path).unwrap();
std::fs::rename(&plan_path, &new_plan_path).unwrap();
// Move the old database and plan file back
std::fs::rename(&old_db_path, &db_path).unwrap();
std::fs::rename(&old_plan_path, &plan_path).unwrap();
} else if let Ok(result) = result {
match result {
Ok(_) => {
log::info!("simulation completed successfully");
} else {
// No doublecheck, run shrinking if panicking or found a bug.
match &result {
SandboxedResult::Correct => {
log::info!("simulation succeeded");
}
Err(e) => {
log::error!("simulation failed: {:?}", e);
SandboxedResult::Panicked {
error,
last_execution,
}
| SandboxedResult::FoundBug {
error,
last_execution,
..
} => {
if let SandboxedResult::FoundBug { history, .. } = &result {
// No panic occurred, so write the history to a file
let f = std::fs::File::create(&paths.history).unwrap();
let mut f = std::io::BufWriter::new(f);
for execution in history.history.iter() {
writeln!(
f,
"{} {} {}",
execution.connection_index,
execution.interaction_index,
execution.secondary_index
)
.unwrap();
}
}
log::error!("simulation failed: '{}'", error);
if cli_opts.shrink {
log::info!("Starting to shrink");
let shrink = Some(last_execution);
let last_execution = Arc::new(Mutex::new(*last_execution));
let shrunk = SandboxedResult::from(
std::panic::catch_unwind(|| {
run_simulation(
seed,
&cli_opts,
&paths.shrunk_db,
&paths.shrunk_plan,
last_execution.clone(),
shrink,
)
}),
last_execution,
);
match (&shrunk, &result) {
(
SandboxedResult::Panicked { error: e1, .. },
SandboxedResult::Panicked { error: e2, .. },
)
| (
SandboxedResult::FoundBug { error: e1, .. },
SandboxedResult::FoundBug { error: e2, .. },
) => {
if e1 != e2 {
log::error!(
"shrinking failed, the error was not properly reproduced"
);
} else {
log::info!("shrinking succeeded");
}
}
(_, SandboxedResult::Correct) => {
unreachable!("shrinking should never be called on a correct simulation")
}
_ => {
log::error!("shrinking failed, the error was not properly reproduced");
}
}
// Write the shrunk plan to a file
let shrunk_plan = std::fs::read(&paths.shrunk_plan).unwrap();
let mut f = std::fs::File::create(&paths.shrunk_plan).unwrap();
f.write_all(&shrunk_plan).unwrap();
}
}
}
}
// Print the seed, the locations of the database and the plan file at the end again for easily accessing them.
println!("database path: {:?}", db_path);
println!("simulator plan path: {:?}", plan_path);
println!("database path: {:?}", paths.db);
if cli_opts.doublecheck {
println!("doublecheck database path: {:?}", paths.doublecheck_db);
} else if cli_opts.shrink {
println!("shrunk database path: {:?}", paths.shrunk_db);
}
println!("simulator plan path: {:?}", paths.plan);
if cli_opts.shrink {
println!("shrunk plan path: {:?}", paths.shrunk_plan);
}
println!("simulator history path: {:?}", paths.history);
println!("seed: {}", seed);
Ok(())
}
/// Rename the primary database/plan files to their `_double` counterparts,
/// freeing the primary names for a doublecheck run. Panics if a rename fails.
fn move_db_and_plan_files(output_dir: &Path) {
    for (src, dst) in [
        ("simulator.db", "simulator_double.db"),
        ("simulator.plan", "simulator_double.plan"),
    ] {
        std::fs::rename(output_dir.join(src), output_dir.join(dst)).unwrap();
    }
}
/// Undo `move_db_and_plan_files`: rename the `_double` files back to the
/// primary database/plan names. Panics if a rename fails.
fn revert_db_and_plan_files(output_dir: &Path) {
    for (src, dst) in [
        ("simulator_double.db", "simulator.db"),
        ("simulator_double.plan", "simulator.plan"),
    ] {
        std::fs::rename(output_dir.join(src), output_dir.join(dst)).unwrap();
    }
}
/// Outcome of a simulation run executed under `std::panic::catch_unwind`.
#[derive(Debug)]
enum SandboxedResult {
    /// The run panicked; `error` holds the panic payload rendered as a string.
    Panicked {
        error: String,
        last_execution: Execution,
    },
    /// The run finished but reported a failure; the execution history is
    /// preserved so it can be written to disk.
    FoundBug {
        error: String,
        history: ExecutionHistory,
        last_execution: Execution,
    },
    /// The run completed without detecting any problem.
    Correct,
}
impl SandboxedResult {
    /// Translate the raw result of a sandboxed (`catch_unwind`) simulation run
    /// into a `SandboxedResult`.
    ///
    /// * `Ok` with no error -> `Correct`.
    /// * `Ok` with an error -> `FoundBug`: the run itself finished, so its
    ///   execution history is available and is carried along. (Previously this
    ///   branch produced `Panicked` and dropped the history, which made the
    ///   `FoundBug { history, .. }` arm in `main` unreachable.)
    /// * `Err` (a panic) -> `Panicked`, with the panic payload stringified.
    fn from(
        result: Result<ExecutionResult, Box<dyn Any + Send>>,
        last_execution: Arc<Mutex<Execution>>,
    ) -> Self {
        match result {
            Ok(ExecutionResult { error: None, .. }) => SandboxedResult::Correct,
            Ok(ExecutionResult {
                error: Some(e),
                history,
            }) => {
                // An assertion/interaction failed without a panic: this is a
                // found bug, so keep the history for the caller to persist.
                let error = format!("{:?}", e);
                let last_execution = last_execution.lock().unwrap();
                SandboxedResult::FoundBug {
                    error,
                    history,
                    last_execution: *last_execution,
                }
            }
            Err(payload) => {
                log::error!("panic occurred");
                // Panic payloads are conventionally `&str` or `String`;
                // anything else is reported as unknown.
                let err = if let Some(s) = payload.downcast_ref::<&str>() {
                    log::error!("{}", s);
                    s.to_string()
                } else if let Some(s) = payload.downcast_ref::<String>() {
                    log::error!("{}", s);
                    s.to_string()
                } else {
                    log::error!("unknown panic payload");
                    "unknown panic payload".to_string()
                };
                // The panic may have poisoned the mutex; clear the poison so
                // the last recorded execution can still be read.
                last_execution.clear_poison();
                SandboxedResult::Panicked {
                    error: err,
                    last_execution: *last_execution.lock().unwrap(),
                }
            }
        }
    }
}
fn run_simulation(
@@ -142,7 +350,9 @@ fn run_simulation(
cli_opts: &SimulatorCLI,
db_path: &Path,
plan_path: &Path,
) -> Result<()> {
last_execution: Arc<Mutex<Execution>>,
shrink: Option<&Execution>,
) -> ExecutionResult {
let mut rng = ChaCha8Rng::seed_from_u64(seed);
let (create_percent, read_percent, write_percent, delete_percent) = {
@@ -159,24 +369,6 @@ fn run_simulation(
(create_percent, read_percent, write_percent, delete_percent)
};
if cli_opts.minimum_size < 1 {
return Err(limbo_core::LimboError::InternalError(
"minimum size must be at least 1".to_string(),
));
}
if cli_opts.maximum_size < 1 {
return Err(limbo_core::LimboError::InternalError(
"maximum size must be at least 1".to_string(),
));
}
if cli_opts.maximum_size < cli_opts.minimum_size {
return Err(limbo_core::LimboError::InternalError(
"maximum size must be greater than or equal to minimum size".to_string(),
));
}
let opts = SimulatorOpts {
ticks: rng.gen_range(cli_opts.minimum_size..=cli_opts.maximum_size),
max_connections: 1, // TODO: for now let's use one connection as we didn't implement
@@ -212,21 +404,36 @@ fn run_simulation(
log::info!("Generating database interaction plan...");
let mut plans = (1..=env.opts.max_connections)
.map(|_| InteractionPlan::arbitrary_from(&mut env.rng.clone(), &env))
.map(|_| InteractionPlan::arbitrary_from(&mut env.rng.clone(), &mut env))
.collect::<Vec<_>>();
let mut states = plans
.iter()
.map(|_| InteractionPlanState {
stack: vec![],
interaction_pointer: 0,
secondary_pointer: 0,
})
.collect::<Vec<_>>();
let plan = if let Some(failing_execution) = shrink {
// todo: for now, we only use 1 connection, so it's safe to use the first plan.
println!("Interactions Before: {}", plans[0].plan.len());
let shrunk = plans[0].shrink_interaction_plan(failing_execution);
println!("Interactions After: {}", shrunk.plan.len());
shrunk
} else {
plans[0].clone()
};
let mut f = std::fs::File::create(plan_path).unwrap();
// todo: create a detailed plan file with all the plans. for now, we only use 1 connection, so it's safe to use the first plan.
f.write_all(plans[0].to_string().as_bytes()).unwrap();
f.write_all(plan.to_string().as_bytes()).unwrap();
log::info!("{}", plans[0].stats());
log::info!("{}", plan.stats());
log::info!("Executing database interaction plan...");
let result = execute_plans(&mut env, &mut plans);
if result.is_err() {
log::error!("error executing plans: {:?}", result.as_ref().err());
}
let result = execute_plans(&mut env, &mut plans, &mut states, last_execution);
env.io.print_stats();
@@ -235,94 +442,37 @@ fn run_simulation(
result
}
fn execute_plans(env: &mut SimulatorEnv, plans: &mut [InteractionPlan]) -> Result<()> {
let now = std::time::Instant::now();
// todo: add history here by recording which interaction was executed at which tick
for _tick in 0..env.opts.ticks {
// Pick the connection to interact with
let connection_index = pick_index(env.connections.len(), &mut env.rng);
// Execute the interaction for the selected connection
execute_plan(env, connection_index, plans)?;
// Check if the maximum time for the simulation has been reached
if now.elapsed().as_secs() >= env.opts.max_time_simulation as u64 {
return Err(limbo_core::LimboError::InternalError(
"maximum time for simulation reached".into(),
));
}
}
Ok(())
/// Initialize the global logger: default level `info` (overridable through
/// the `RUST_LOG` environment variable) with timestamps, module paths, and
/// targets suppressed for compact output.
fn init_logger() {
    let env = env_logger::Env::default().filter_or("RUST_LOG", "info");
    let mut builder = env_logger::Builder::from_env(env);
    builder
        .format_timestamp(None)
        .format_module_path(false)
        .format_target(false);
    builder.init();
}
fn execute_plan(
env: &mut SimulatorEnv,
connection_index: usize,
plans: &mut [InteractionPlan],
) -> Result<()> {
let connection = &env.connections[connection_index];
let plan = &mut plans[connection_index];
if plan.interaction_pointer >= plan.plan.len() {
return Ok(());
}
let interaction = &plan.plan[plan.interaction_pointer];
if let SimConnection::Disconnected = connection {
log::info!("connecting {}", connection_index);
env.connections[connection_index] = SimConnection::Connected(env.db.connect());
} else {
match execute_interaction(env, connection_index, interaction, &mut plan.stack) {
Ok(_) => {
log::debug!("connection {} processed", connection_index);
plan.interaction_pointer += 1;
}
Err(err) => {
log::error!("error {}", err);
return Err(err);
}
}
}
Ok(())
/// Print the ASCII-art startup banner to stdout.
fn banner() {
    println!("{BANNER}");
}
fn execute_interaction(
env: &mut SimulatorEnv,
connection_index: usize,
interaction: &Interaction,
stack: &mut Vec<ResultSet>,
) -> Result<()> {
log::info!("executing: {}", interaction);
match interaction {
generation::plan::Interaction::Query(_) => {
let conn = match &mut env.connections[connection_index] {
SimConnection::Connected(conn) => conn,
SimConnection::Disconnected => unreachable!(),
};
const BANNER: &str = r#"
,_______________________________.
| ,___________________________. |
| | | |
| | >HELLO | |
| | | |
| | >A STRANGE GAME. | |
| | >THE ONLY WINNING MOVE IS | |
| | >NOT TO PLAY. | |
| |___________________________| |
| |
| |
`-------------------------------`
| |
|______________|
,______________________.
/ /====================\ \
/ /======================\ \
/____________________________\
\____________________________/
log::debug!("{}", interaction);
let results = interaction.execute_query(conn);
log::debug!("{:?}", results);
stack.push(results);
}
generation::plan::Interaction::Assertion(_) => {
interaction.execute_assertion(stack)?;
stack.clear();
}
Interaction::Fault(_) => {
interaction.execute_fault(env, connection_index)?;
}
}
Ok(())
}
fn compare_equal_rows(a: &[Vec<Value>], b: &[Vec<Value>]) {
assert_eq!(a.len(), b.len(), "lengths are different");
for (r1, r2) in a.iter().zip(b) {
for (v1, v2) in r1.iter().zip(r2) {
assert_eq!(v1, v2, "values are different");
}
}
}
"#;

View File

@@ -12,6 +12,36 @@ pub(crate) enum Predicate {
Lt(String, Value), // column < Value
}
impl Predicate {
pub(crate) fn true_() -> Self {
Self::And(vec![])
}
pub(crate) fn false_() -> Self {
Self::Or(vec![])
}
pub(crate) fn test(&self, row: &[Value], table: &Table) -> bool {
let get_value = |name: &str| {
table
.columns
.iter()
.zip(row.iter())
.find(|(column, _)| column.name == name)
.map(|(_, value)| value)
};
match self {
Predicate::And(vec) => vec.iter().all(|p| p.test(row, table)),
Predicate::Or(vec) => vec.iter().any(|p| p.test(row, table)),
Predicate::Eq(column, value) => get_value(column) == Some(value),
Predicate::Neq(column, value) => get_value(column) != Some(value),
Predicate::Gt(column, value) => get_value(column).map(|v| v > value).unwrap_or(false),
Predicate::Lt(column, value) => get_value(column).map(|v| v < value).unwrap_or(false),
}
}
}
impl Display for Predicate {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
@@ -53,7 +83,7 @@ impl Display for Predicate {
}
// This type represents the potential queries on the database.
#[derive(Debug)]
#[derive(Debug, Clone)]
pub(crate) enum Query {
Create(Create),
Select(Select),
@@ -61,6 +91,24 @@ pub(crate) enum Query {
Delete(Delete),
}
impl Query {
    /// Tables that must already exist for this query to run.
    /// `Create` depends on nothing.
    pub(crate) fn dependencies(&self) -> Vec<String> {
        match self {
            Self::Create(_) => Vec::new(),
            Self::Select(Select { table, .. })
            | Self::Insert(Insert { table, .. })
            | Self::Delete(Delete { table, .. }) => vec![table.clone()],
        }
    }
    /// Every table name this query mentions, including the table a `Create`
    /// introduces.
    pub(crate) fn uses(&self) -> Vec<String> {
        let table = match self {
            Self::Create(Create { table }) => &table.name,
            Self::Select(Select { table, .. })
            | Self::Insert(Insert { table, .. })
            | Self::Delete(Delete { table, .. }) => table,
        };
        vec![table.clone()]
    }
}
#[derive(Debug, Clone)]
pub(crate) struct Create {
pub(crate) table: Table,

View File

@@ -53,6 +53,22 @@ pub(crate) enum Value {
Blob(Vec<u8>),
}
impl PartialOrd for Value {
    /// Partial ordering over simulator values: `Null` equals itself and sorts
    /// below every other value; same-typed values use their natural ordering.
    /// Comparing two different non-null types currently returns `None`
    /// (no type coercion yet — see the todo below).
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        match (self, other) {
            (Self::Null, Self::Null) => Some(std::cmp::Ordering::Equal),
            (Self::Null, _) => Some(std::cmp::Ordering::Less),
            (_, Self::Null) => Some(std::cmp::Ordering::Greater),
            (Self::Integer(i1), Self::Integer(i2)) => i1.partial_cmp(i2),
            (Self::Float(f1), Self::Float(f2)) => f1.partial_cmp(f2),
            (Self::Text(t1), Self::Text(t2)) => t1.partial_cmp(t2),
            (Self::Blob(b1), Self::Blob(b2)) => b1.partial_cmp(b2),
            // todo: add type coercions here
            _ => None,
        }
    }
}
fn to_sqlite_blob(bytes: &[u8]) -> String {
format!(
"X'{}'",

View File

@@ -18,14 +18,14 @@ pub struct SimulatorCLI {
short = 'n',
long,
help = "change the maximum size of the randomly generated sequence of interactions",
default_value_t = 1024
default_value_t = 5000
)]
pub maximum_size: usize,
#[clap(
short = 'k',
long,
help = "change the minimum size of the randomly generated sequence of interactions",
default_value_t = 1
default_value_t = 1000
)]
pub minimum_size: usize,
#[clap(
@@ -35,4 +35,25 @@ pub struct SimulatorCLI {
default_value_t = 60 * 60 // default to 1 hour
)]
pub maximum_time: usize,
#[clap(
short = 'm',
long,
help = "minimize(shrink) the failing counterexample"
)]
pub shrink: bool,
}
impl SimulatorCLI {
    /// Check that the size bounds are usable: both at least 1 and
    /// `minimum_size <= maximum_size`. Returns the first violation found as
    /// an error message.
    pub fn validate(&self) -> Result<(), String> {
        let message = if self.minimum_size < 1 {
            "minimum size must be at least 1"
        } else if self.maximum_size < 1 {
            "maximum size must be at least 1"
        } else if self.minimum_size > self.maximum_size {
            "Minimum size cannot be greater than maximum size"
        } else {
            return Ok(());
        };
        Err(message.to_string())
    }
}

View File

@@ -0,0 +1,203 @@
use std::sync::{Arc, Mutex};
use limbo_core::{LimboError, Result};
use crate::generation::{
self, pick_index,
plan::{Interaction, InteractionPlan, InteractionPlanState, ResultSet},
};
use super::env::{SimConnection, SimulatorEnv};
/// A single step of the simulation: which connection ran which interaction.
#[derive(Debug, Clone, Copy)]
pub(crate) struct Execution {
    // Index into the environment's connection list.
    pub(crate) connection_index: usize,
    // Index of the property entry in that connection's plan.
    pub(crate) interaction_index: usize,
    // Index of the interaction within that property.
    pub(crate) secondary_index: usize,
}
impl Execution {
    /// Plain constructor; the indices are not validated here.
    pub(crate) fn new(
        connection_index: usize,
        interaction_index: usize,
        secondary_index: usize,
    ) -> Self {
        Self {
            connection_index,
            interaction_index,
            secondary_index,
        }
    }
}
/// Ordered record of every execution step attempted during a run.
#[derive(Debug)]
pub(crate) struct ExecutionHistory {
    pub(crate) history: Vec<Execution>,
}
impl ExecutionHistory {
    /// Start with an empty history.
    fn new() -> Self {
        Self {
            history: Vec::new(),
        }
    }
}
/// Result of running the interaction plans: the full step history plus the
/// error that stopped the run, if any (`None` means the run completed).
pub(crate) struct ExecutionResult {
    pub(crate) history: ExecutionHistory,
    pub(crate) error: Option<limbo_core::LimboError>,
}
impl ExecutionResult {
    fn new(history: ExecutionHistory, error: Option<LimboError>) -> Self {
        Self { history, error }
    }
}
/// Drive the connections' interaction plans for up to `env.opts.ticks` ticks,
/// recording each attempted step in the returned history.
///
/// `last_execution` is updated before every step so that, if the process
/// panics, the harness can still see which step was in flight. The run stops
/// early with an error when an interaction fails or when
/// `env.opts.max_time_simulation` seconds have elapsed.
pub(crate) fn execute_plans(
    env: &mut SimulatorEnv,
    plans: &mut [InteractionPlan],
    states: &mut [InteractionPlanState],
    last_execution: Arc<Mutex<Execution>>,
) -> ExecutionResult {
    let mut history = ExecutionHistory::new();
    let now = std::time::Instant::now();
    for _tick in 0..env.opts.ticks {
        // Pick the connection to interact with
        let connection_index = pick_index(env.connections.len(), &mut env.rng);
        let state = &mut states[connection_index];
        history.history.push(Execution::new(
            connection_index,
            state.interaction_pointer,
            state.secondary_pointer,
        ));
        // Record the step we are about to attempt. The guard stays live for
        // the rest of this iteration (NOTE(review): assumes no other thread
        // needs the lock meanwhile — confirm if execution becomes concurrent).
        let mut last_execution = last_execution.lock().unwrap();
        last_execution.connection_index = connection_index;
        last_execution.interaction_index = state.interaction_pointer;
        last_execution.secondary_index = state.secondary_pointer;
        // Execute the interaction for the selected connection
        match execute_plan(env, connection_index, plans, states) {
            Ok(_) => {}
            Err(err) => {
                return ExecutionResult::new(history, Some(err));
            }
        }
        // Check if the maximum time for the simulation has been reached
        if now.elapsed().as_secs() >= env.opts.max_time_simulation as u64 {
            return ExecutionResult::new(
                history,
                Some(limbo_core::LimboError::InternalError(
                    "maximum time for simulation reached".into(),
                )),
            );
        }
    }
    ExecutionResult::new(history, None)
}
/// Advance a single connection by one interaction.
///
/// A disconnected connection is (re)connected this tick instead of executing
/// anything. On success the connection's plan pointers advance according to
/// the returned `ExecutionContinuation`; on failure the error is propagated
/// and the whole run stops. Returns `Ok(())` immediately once the plan for
/// this connection is exhausted.
fn execute_plan(
    env: &mut SimulatorEnv,
    connection_index: usize,
    plans: &mut [InteractionPlan],
    states: &mut [InteractionPlanState],
) -> Result<()> {
    let connection = &env.connections[connection_index];
    let plan = &mut plans[connection_index];
    let state = &mut states[connection_index];
    if state.interaction_pointer >= plan.plan.len() {
        return Ok(());
    }
    let interaction = &plan.plan[state.interaction_pointer].interactions()[state.secondary_pointer];
    if let SimConnection::Disconnected = connection {
        log::info!("connecting {}", connection_index);
        env.connections[connection_index] = SimConnection::Connected(env.db.connect());
    } else {
        match execute_interaction(env, connection_index, interaction, &mut state.stack) {
            Ok(next_execution) => {
                log::debug!("connection {} processed", connection_index);
                // Move to the next interaction or property
                match next_execution {
                    ExecutionContinuation::NextInteraction => {
                        if state.secondary_pointer + 1
                            >= plan.plan[state.interaction_pointer].interactions().len()
                        {
                            // If we have reached the end of the interactions for this property, move to the next property
                            state.interaction_pointer += 1;
                            state.secondary_pointer = 0;
                        } else {
                            // Otherwise, move to the next interaction
                            state.secondary_pointer += 1;
                        }
                    }
                    ExecutionContinuation::NextProperty => {
                        // Skip to the next property
                        state.interaction_pointer += 1;
                        state.secondary_pointer = 0;
                    }
                }
            }
            Err(err) => {
                log::error!("error {}", err);
                return Err(err);
            }
        }
    }
    Ok(())
}
/// The next point of control flow after executing an interaction.
/// `execute_interaction` uses this type in conjunction with a result, where
/// the `Err` case indicates a full-stop due to a bug, and the `Ok` case
/// indicates the next step in the plan.
enum ExecutionContinuation {
    /// Default continuation, execute the next interaction.
    NextInteraction,
    /// Typically used in the case of precondition failures, skip to the next property.
    NextProperty,
}
/// Execute one interaction against the environment.
///
/// Queries push their result set onto the connection's stack; assertions
/// consume (and clear) the stack; a failed assumption clears the stack and
/// skips the rest of the current property instead of failing the run; faults
/// are injected into the environment directly.
fn execute_interaction(
    env: &mut SimulatorEnv,
    connection_index: usize,
    interaction: &Interaction,
    stack: &mut Vec<ResultSet>,
) -> Result<ExecutionContinuation> {
    log::info!("executing: {}", interaction);
    match interaction {
        generation::plan::Interaction::Query(_) => {
            let conn = match &mut env.connections[connection_index] {
                SimConnection::Connected(conn) => conn,
                // execute_plan connects a connection before dispatching here.
                SimConnection::Disconnected => unreachable!(),
            };
            log::debug!("{}", interaction);
            let results = interaction.execute_query(conn);
            log::debug!("{:?}", results);
            stack.push(results);
        }
        generation::plan::Interaction::Assertion(_) => {
            interaction.execute_assertion(stack, env)?;
            stack.clear();
        }
        generation::plan::Interaction::Assumption(_) => {
            let assumption_result = interaction.execute_assumption(stack, env);
            stack.clear();
            if assumption_result.is_err() {
                log::warn!("assumption failed: {:?}", assumption_result);
                return Ok(ExecutionContinuation::NextProperty);
            }
        }
        Interaction::Fault(_) => {
            interaction.execute_fault(env, connection_index)?;
        }
    }
    Ok(ExecutionContinuation::NextInteraction)
}

View File

@@ -4,11 +4,22 @@ use limbo_core::{File, Result};
pub(crate) struct SimulatorFile {
pub(crate) inner: Rc<dyn File>,
pub(crate) fault: RefCell<bool>,
/// Number of `pread` function calls (both success and failures).
pub(crate) nr_pread_calls: RefCell<usize>,
/// Number of `pread` function calls with injected fault.
pub(crate) nr_pread_faults: RefCell<usize>,
/// Number of `pwrite` function calls (both success and failures).
pub(crate) nr_pwrite_calls: RefCell<usize>,
/// Number of `pwrite` function calls with injected fault.
pub(crate) nr_pwrite_faults: RefCell<usize>,
pub(crate) writes: RefCell<usize>,
pub(crate) reads: RefCell<usize>,
pub(crate) syncs: RefCell<usize>,
/// Number of `sync` function calls (both success and failures).
pub(crate) nr_sync_calls: RefCell<usize>,
pub(crate) page_size: usize,
}
@@ -18,14 +29,29 @@ impl SimulatorFile {
}
pub(crate) fn print_stats(&self) {
println!(
"pread faults: {}, pwrite faults: {}, reads: {}, writes: {}, syncs: {}",
*self.nr_pread_faults.borrow(),
*self.nr_pwrite_faults.borrow(),
*self.reads.borrow(),
*self.writes.borrow(),
*self.syncs.borrow(),
log::info!("op calls faults");
log::info!("--------- -------- --------");
log::info!(
"pread {:8} {:8}",
*self.nr_pread_calls.borrow(),
*self.nr_pread_faults.borrow()
);
log::info!(
"pwrite {:8} {:8}",
*self.nr_pwrite_calls.borrow(),
*self.nr_pwrite_faults.borrow()
);
log::info!(
"sync {:8} {:8}",
*self.nr_sync_calls.borrow(),
0 // No fault counter for sync
);
log::info!("--------- -------- --------");
let sum_calls = *self.nr_pread_calls.borrow()
+ *self.nr_pwrite_calls.borrow()
+ *self.nr_sync_calls.borrow();
let sum_faults = *self.nr_pread_faults.borrow() + *self.nr_pwrite_faults.borrow();
log::info!("total {:8} {:8}", sum_calls, sum_faults);
}
}
@@ -49,13 +75,13 @@ impl limbo_core::File for SimulatorFile {
}
fn pread(&self, pos: usize, c: Rc<limbo_core::Completion>) -> Result<()> {
*self.nr_pread_calls.borrow_mut() += 1;
if *self.fault.borrow() {
*self.nr_pread_faults.borrow_mut() += 1;
return Err(limbo_core::LimboError::InternalError(
"Injected fault".into(),
));
}
*self.reads.borrow_mut() += 1;
self.inner.pread(pos, c)
}
@@ -65,18 +91,18 @@ impl limbo_core::File for SimulatorFile {
buffer: Rc<std::cell::RefCell<limbo_core::Buffer>>,
c: Rc<limbo_core::Completion>,
) -> Result<()> {
*self.nr_pwrite_calls.borrow_mut() += 1;
if *self.fault.borrow() {
*self.nr_pwrite_faults.borrow_mut() += 1;
return Err(limbo_core::LimboError::InternalError(
"Injected fault".into(),
));
}
*self.writes.borrow_mut() += 1;
self.inner.pwrite(pos, buffer, c)
}
fn sync(&self, c: Rc<limbo_core::Completion>) -> Result<()> {
*self.syncs.borrow_mut() += 1;
*self.nr_sync_calls.borrow_mut() += 1;
self.inner.sync(c)
}

View File

@@ -40,8 +40,10 @@ impl SimulatorIO {
}
pub(crate) fn print_stats(&self) {
println!("run_once faults: {}", self.nr_run_once_faults.borrow());
log::info!("run_once faults: {}", self.nr_run_once_faults.borrow());
for file in self.files.borrow().iter() {
log::info!("");
log::info!("===========================");
file.print_stats();
}
}
@@ -60,9 +62,9 @@ impl IO for SimulatorIO {
fault: RefCell::new(false),
nr_pread_faults: RefCell::new(0),
nr_pwrite_faults: RefCell::new(0),
reads: RefCell::new(0),
writes: RefCell::new(0),
syncs: RefCell::new(0),
nr_pread_calls: RefCell::new(0),
nr_pwrite_calls: RefCell::new(0),
nr_sync_calls: RefCell::new(0),
page_size: self.page_size,
});
self.files.borrow_mut().push(file.clone());

View File

@@ -1,5 +1,6 @@
// Runner submodules: CLI parsing, simulation environment, plan execution,
// and the simulated file/IO layers.
pub mod cli;
pub mod env;
pub mod execution;
#[allow(dead_code)]
pub mod file;
pub mod io;

1
simulator/shrink/mod.rs Normal file
View File

@@ -0,0 +1 @@
// Counterexample shrinking for interaction plans.
pub mod plan;

53
simulator/shrink/plan.rs Normal file
View File

@@ -0,0 +1,53 @@
use crate::{
generation::plan::{InteractionPlan, Interactions},
model::query::Query,
runner::execution::Execution,
};
impl InteractionPlan {
    /// Create a smaller interaction plan by deleting a property
    ///
    /// Given the failing step, keep only the prefix of the plan up to and
    /// including the failing property, drop properties unrelated to the
    /// tables the failing property depends on, strip the extension queries of
    /// the remaining properties, and remove standalone SELECT queries.
    ///
    /// NOTE(review): if the failing property has an empty `dependencies()`
    /// (e.g. a bare CREATE), the retain below can remove the failing property
    /// itself from the shrunk plan — confirm this is intended.
    pub(crate) fn shrink_interaction_plan(&self, failing_execution: &Execution) -> InteractionPlan {
        // todo: this is a very naive implementation, next steps are;
        // - Shrink to multiple values by removing random interactions
        // - Shrink properties by removing their extensions, or shrinking their values
        let mut plan = self.clone();
        let failing_property = &self.plan[failing_execution.interaction_index];
        let depending_tables = failing_property.dependencies();
        let before = self.plan.len();
        // Remove all properties after the failing one
        plan.plan.truncate(failing_execution.interaction_index + 1);
        // Remove all properties that do not use the failing tables
        plan.plan
            .retain(|p| p.uses().iter().any(|t| depending_tables.contains(t)));
        // Remove the extensional parts of the properties
        for interaction in plan.plan.iter_mut() {
            if let Interactions::Property(p) = interaction {
                match p {
                    crate::generation::property::Property::InsertSelect { queries, .. }
                    | crate::generation::property::Property::DoubleCreateFailure {
                        queries, ..
                    } => {
                        queries.clear();
                    }
                }
            }
        }
        // Drop standalone SELECTs — presumably safe because they do not
        // change database state (TODO confirm no property reads their results).
        plan.plan
            .retain(|p| !matches!(p, Interactions::Query(Query::Select(_))));
        let after = plan.plan.len();
        log::info!(
            "Shrinking interaction plan from {} to {} properties",
            before,
            after
        );
        plan
    }
}

View File

@@ -110,6 +110,73 @@ mod tests {
Ok(())
}
#[test]
/// There was a regression with inserting multiple rows with a column containing an unary operator :)
/// https://github.com/tursodatabase/limbo/pull/679
fn test_regression_multi_row_insert() -> anyhow::Result<()> {
let _ = env_logger::try_init();
let tmp_db = TempDatabase::new("CREATE TABLE test (x REAL);");
let conn = tmp_db.connect_limbo();
let insert_query = "INSERT INTO test VALUES (-2), (-3), (-1)";
let list_query = "SELECT * FROM test";
match conn.query(insert_query) {
Ok(Some(ref mut rows)) => loop {
match rows.next_row()? {
StepResult::IO => {
tmp_db.io.run_once()?;
}
StepResult::Done => break,
_ => unreachable!(),
}
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
}
};
do_flush(&conn, &tmp_db)?;
let mut current_read_index = 1;
let expected_ids = vec![-3, -2, -1];
let mut actual_ids = Vec::new();
match conn.query(list_query) {
Ok(Some(ref mut rows)) => loop {
match rows.next_row()? {
StepResult::Row(row) => {
let first_value = row.values.first().expect("missing id");
let id = match first_value {
Value::Float(f) => *f as i32,
_ => panic!("expected float"),
};
actual_ids.push(id);
current_read_index += 1;
}
StepResult::IO => {
tmp_db.io.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
StepResult::Busy => {
panic!("Database is busy");
}
}
},
Ok(None) => {}
Err(err) => {
eprintln!("{}", err);
}
}
assert_eq!(current_read_index, 4); // Verify we read all rows
// sort ids
actual_ids.sort();
assert_eq!(actual_ids, expected_ids);
Ok(())
}
#[test]
fn test_simple_overflow_page() -> anyhow::Result<()> {
let _ = env_logger::try_init();
@@ -505,4 +572,132 @@ mod tests {
do_flush(&conn, &tmp_db)?;
Ok(())
}
#[test]
fn test_statement_reset() -> anyhow::Result<()> {
let _ = env_logger::try_init();
let tmp_db = TempDatabase::new("create table test (i integer);");
let conn = tmp_db.connect_limbo();
conn.execute("insert into test values (1)")?;
conn.execute("insert into test values (2)")?;
let mut stmt = conn.prepare("select * from test")?;
loop {
match stmt.step()? {
StepResult::Row(row) => {
assert_eq!(row.values[0], Value::Integer(1));
break;
}
StepResult::IO => tmp_db.io.run_once()?,
_ => break,
}
}
stmt.reset();
loop {
match stmt.step()? {
StepResult::Row(row) => {
assert_eq!(row.values[0], Value::Integer(1));
break;
}
StepResult::IO => tmp_db.io.run_once()?,
_ => break,
}
}
Ok(())
}
#[test]
fn test_statement_reset_bind() -> anyhow::Result<()> {
    let _ = env_logger::try_init();
    let tmp_db = TempDatabase::new("create table test (i integer);");
    let conn = tmp_db.connect_limbo();

    let mut stmt = conn.prepare("select ?")?;

    // First execution: the single parameter slot is bound to 1.
    stmt.bind_at(1.try_into().unwrap(), Value::Integer(1));
    let mut finished = false;
    while !finished {
        match stmt.step()? {
            StepResult::Row(row) => assert_eq!(row.values[0], Value::Integer(1)),
            StepResult::IO => tmp_db.io.run_once()?,
            _ => finished = true,
        }
    }

    // After reset() the statement accepts a fresh binding and must return
    // the new value rather than the stale one.
    stmt.reset();
    stmt.bind_at(1.try_into().unwrap(), Value::Integer(2));
    let mut finished = false;
    while !finished {
        match stmt.step()? {
            StepResult::Row(row) => assert_eq!(row.values[0], Value::Integer(2)),
            StepResult::IO => tmp_db.io.run_once()?,
            _ => finished = true,
        }
    }
    Ok(())
}
#[test]
fn test_statement_bind() -> anyhow::Result<()> {
    let _ = env_logger::try_init();
    let tmp_db = TempDatabase::new("create table test (i integer);");
    let conn = tmp_db.connect_limbo();

    // `?` and `?1` refer to the same parameter slot, so the statement has
    // four distinct parameters: ?1, :named, ?3 and ?4 (checked below).
    let mut stmt = conn.prepare("select ?, ?1, :named, ?3, ?4")?;

    stmt.bind_at(1.try_into().unwrap(), Value::Text(&"hello".to_string()));
    let i = stmt.parameters().index(":named").unwrap();
    stmt.bind_at(i, Value::Integer(42));
    stmt.bind_at(3.try_into().unwrap(), Value::Blob(&vec![0x1, 0x2, 0x3]));
    stmt.bind_at(4.try_into().unwrap(), Value::Float(0.5));

    assert_eq!(stmt.parameters().count(), 4);

    loop {
        match stmt.step()? {
            StepResult::Row(row) => {
                // Match each column exhaustively. The previous `if let`
                // checks silently skipped the assertion whenever a column
                // came back as an unexpected variant, so a binding/type
                // regression could never fail this test.
                match row.values[0] {
                    Value::Text(s) => assert_eq!(s, "hello"),
                    _ => panic!("column 0: expected text"),
                }
                match row.values[1] {
                    Value::Text(s) => assert_eq!(s, "hello"),
                    _ => panic!("column 1: expected text"),
                }
                match row.values[2] {
                    Value::Integer(i) => assert_eq!(i, 42),
                    _ => panic!("column 2: expected integer"),
                }
                match row.values[3] {
                    Value::Blob(v) => assert_eq!(v, &vec![0x1u8, 0x2, 0x3]),
                    _ => panic!("column 3: expected blob"),
                }
                match row.values[4] {
                    Value::Float(f) => assert_eq!(f, 0.5),
                    _ => panic!("column 4: expected float"),
                }
            }
            StepResult::IO => {
                tmp_db.io.run_once()?;
            }
            StepResult::Interrupt => break,
            StepResult::Done => break,
            StepResult::Busy => panic!("Database is busy"),
        };
    }
    Ok(())
}
}

139
testing/extensions.py Executable file
View File

@@ -0,0 +1,139 @@
#!/usr/bin/env python3
import os
import subprocess
import select
import time
import uuid
# Path to the limbo shell binary under test (debug build).
sqlite_exec = "./target/debug/limbo"
# Extra flags passed to the shell; defaults to "-q" (quiet). Override with
# the SQLITE_FLAGS environment variable (space-separated).
sqlite_flags = os.getenv("SQLITE_FLAGS", "-q").split(" ")
def init_limbo():
    """Spawn the limbo shell as a child process with piped stdio."""
    return subprocess.Popen(
        [sqlite_exec, *sqlite_flags],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        bufsize=0,  # unbuffered: reads observe output as soon as it is written
    )
def execute_sql(pipe, sql):
    """Run `sql` in the shell and return its whitespace-normalized output.

    A sentinel `SELECT 'END_OF_RESULT';` is written right after the
    statement; stdout is then read until the sentinel text appears, which
    delimits the output of `sql` without needing to parse a prompt. Any
    stderr activity aborts the whole run via exit_on_error().
    """
    end_suffix = "END_OF_RESULT"
    write_to_pipe(pipe, sql)
    write_to_pipe(pipe, f"SELECT '{end_suffix}';\n")
    stdout = pipe.stdout
    stderr = pipe.stderr
    output = ""
    while True:
        # Block until the shell produces something on stdout or stderr.
        ready_to_read, _, error_in_pipe = select.select(
            [stdout, stderr], [], [stdout, stderr]
        )
        ready_to_read_or_err = set(ready_to_read + error_in_pipe)
        if stderr in ready_to_read_or_err:
            exit_on_error(stderr)
        if stdout in ready_to_read_or_err:
            # PIPE_BUF is just a convenient bounded chunk size for each read.
            fragment = stdout.read(select.PIPE_BUF)
            output += fragment.decode()
            if output.rstrip().endswith(end_suffix):
                # Drop the echoed sentinel; what remains is `sql`'s output.
                output = output.rstrip().removesuffix(end_suffix)
                break
    output = strip_each_line(output)
    return output
def strip_each_line(lines: str) -> str:
    """Strip surrounding whitespace from every line and drop blank lines.

    Lines containing only whitespace are removed as well. (The original
    filtered on the raw line *before* stripping, so all-space lines leaked
    through as empty entries in the joined result.)
    """
    stripped = (line.strip() for line in lines.split("\n"))
    return "\n".join(line for line in stripped if line)
def write_to_pipe(pipe, command):
    """Send one command line (newline-terminated) to the shell's stdin."""
    stdin = pipe.stdin
    if stdin is None:
        raise RuntimeError("Failed to write to shell")
    stdin.write((command + "\n").encode())
    stdin.flush()
def exit_on_error(stderr):
    """Dump whatever is waiting on the shell's stderr, then abort the run.

    Called only after select() reported stderr readable, so the first
    iteration prints the diagnostics and terminates with exit code 1.
    """
    while select.select([stderr], [], [])[0]:
        print(stderr.read().decode(), end="")
        exit(1)
def run_test(pipe, sql, validator=None):
    """Execute `sql` against the shell and optionally validate its output.

    Raises Exception when `validator` returns a falsy value; with no
    validator the test only checks that the statement executes.
    """
    print(f"Running test: {sql}")
    output = execute_sql(pipe, sql)
    if validator is not None and not validator(output):
        print(f"Test FAILED: {sql}")
        print(f"Returned: {output}")
        raise Exception("Validation failed")
    print("Test PASSED")
def validate_blob(result):
    """Return True when `result` parses as a hexadecimal string.

    HACK: blobs are difficult to test because the shell tries to return
    them as utf8 strings, so callers wrap the blob in hex() and we check
    the output is valid hex. The original `int(result, 16) is not None`
    was always True on success and raised ValueError on bad input,
    bypassing run_test's failure reporting; parse failures now return
    False so they surface as a normal test failure.
    """
    try:
        int(result, 16)
    except ValueError:
        return False
    return True
def validate_string_uuid(result):
    """Check the canonical 8-4-4-4-12 textual UUID shape: 36 chars, 4 dashes."""
    has_uuid_length = len(result) == 36
    return has_uuid_length and result.count("-") == 4
def returns_null(result):
    """Validator: the statement produced no output at all.

    NOTE(review): the bytes comparisons can only match if a caller passes
    raw bytes; execute_sql always hands us a decoded str — confirm whether
    they are still needed.
    """
    return result in ("", b"\n", b"")
def assert_now_unixtime(result):
    """Validator: output equals the current unix time in whole seconds.

    NOTE(review): can flake if the query and this check straddle a second
    boundary.
    """
    now = int(time.time())
    return result == str(now)
def assert_specific_time(result):
    """Validator pinned to the timestamp embedded in the test's fixed UUIDv7."""
    expected = "1736720789"
    return result == expected
def main():
    """Smoke-test the UUID extension end-to-end through the limbo shell."""
    # Fixed UUIDv7 whose embedded timestamp is 1736720789 seconds
    # (verified below via assert_specific_time).
    specific_time = "01945ca0-3189-76c0-9a8f-caf310fc8b8e"
    extension_path = "./target/debug/liblimbo_uuid.so"
    pipe = init_limbo()
    try:
        # before extension loads, assert no function
        run_test(pipe, "SELECT uuid4();", returns_null)
        run_test(pipe, "SELECT uuid4_str();", returns_null)
        run_test(pipe, f".load {extension_path}", returns_null)
        print("Extension loaded successfully.")
        run_test(pipe, "SELECT hex(uuid4());", validate_blob)
        run_test(pipe, "SELECT uuid4_str();", validate_string_uuid)
        run_test(pipe, "SELECT hex(uuid7());", validate_blob)
        # NOTE(review): no validator is passed here, so this only checks that
        # the statement runs. assert_now_unixtime is defined but unused —
        # it was probably meant for this call; confirm and wire it up
        # (beware second-boundary flakiness).
        run_test(
            pipe,
            "SELECT uuid7_timestamp_ms(uuid7()) / 1000;",
        )
        run_test(pipe, "SELECT uuid7_str();", validate_string_uuid)
        run_test(pipe, "SELECT uuid_str(uuid7());", validate_string_uuid)
        # Round-trip conversions: text -> blob -> text must stay well-formed.
        run_test(pipe, "SELECT hex(uuid_blob(uuid7_str()));", validate_blob)
        run_test(pipe, "SELECT uuid_str(uuid_blob(uuid7_str()));", validate_string_uuid)
        run_test(
            pipe,
            f"SELECT uuid7_timestamp_ms('{specific_time}') / 1000;",
            assert_specific_time,
        )
    except Exception as e:
        print(f"Test FAILED: {e}")
        pipe.terminate()
        exit(1)
    pipe.terminate()
    print("All tests passed successfully.")


# Entry point when the script is executed directly (file is executable).
if __name__ == "__main__":
    main()

View File

@@ -1,3 +1,18 @@
#!/usr/bin/env tclsh

set testdir [file dirname $argv0]
# tester.tcl was previously sourced twice (merge artifact); once is enough.
source $testdir/tester.tcl

do_execsql_test_on_specific_db {:memory:} basic-insert {
create table temp (t1 integer, primary key (t1));
insert into temp values (1);
select * from temp;
} {1}

# Values inserted into an INTEGER PRIMARY KEY must be coerced to integers
# when losslessly convertible (2.0, '3', '4.0' all store as integers).
do_execsql_test_on_specific_db {:memory:} must-be-int-insert {
create table temp (t1 integer, primary key (t1));
insert into temp values (1),(2.0),('3'),('4.0');
select * from temp;
} {1
2
3
4}

View File

@@ -106,6 +106,14 @@ Jamie|coat
Jamie|accessories
Cindy|}
# rowid is selectable through a table alias on both sides of a LEFT JOIN;
# unmatched right-hand rows surface as empty (NULL) rowids.
do_execsql_test left-join-row-id {
select u.rowid, p.rowid from users u left join products as p on u.rowid = p.rowid where u.rowid >= 10 limit 5;
} {10|10
11|11
12|
13|
14|}
# A constant-true join condition still behaves as a join: the first user
# pairs with the first product row.
do_execsql_test left-join-constant-condition-true {
select u.first_name, p.name from users u left join products as p on true limit 1;
} {Jamie|hat}

View File

@@ -459,6 +459,118 @@ do_execsql_test bitwise-and-int-agg-int-agg {
} {66}
# << with an integer left operand. Operands are coerced to integers first
# ('a' -> 0, '3.0' -> 3); a negative shift count reverses direction, and
# shift amounts of +/-64 or more produce 0.
foreach {testname lhs rhs ans} {
int-int 1 2 4
int-neg_int 8 -2 2
int-float 1 4.0 16
int-text 1 'a' 1
int-text_float 1 '3.0' 8
int-text_int 1 '1' 2
int-null 1 NULL {}
int-int-overflow 1 64 0
int-int-underflow 1 -64 0
int-float-overflow 1 64.0 0
int-float-underflow 1 -64.0 0
} {
do_execsql_test shift-left-$testname "SELECT $lhs << $rhs" $::ans
}
# << with a float left operand: coerced to integer, same results as above.
foreach {testname lhs rhs ans} {
float-int 1.0 2 4
float-neg_int 8.0 -2 2
float-float 1.0 4.0 16
float-text 1.0 'a' 1
float-text_float 1.0 '3.0' 8
float-text_int 1.0 '1' 2
float-null 1.0 NULL {}
float-int-overflow 1.0 64 0
float-int-underflow 1.0 -64 0
float-float-overflow 1.0 64.0 0
float-float-underflow 1.0 -64.0 0
} {
do_execsql_test shift-left-$testname "SELECT $lhs << $rhs" $::ans
}
# << with text operands: non-numeric text coerces to 0, numeric text to its
# numeric value; NULL on either side yields NULL (empty result).
foreach {testname lhs rhs ans} {
text-int 'a' 2 0
text-float 'a' 4.0 0
text-text 'a' 'a' 0
text_int-text_int '1' '1' 2
text_int-text_float '1' '3.0' 8
text_int-text '1' 'a' 1
text_float-text_int '1.0' '1' 2
text_float-text_float '1.0' '3.0' 8
text_float-text '1.0' 'a' 1
text-null '1' NULL {}
} {
do_execsql_test shift-left-$testname "SELECT $lhs << $rhs" $::ans
}
# << with a NULL left operand is always NULL.
foreach {testname lhs rhs ans} {
null-int NULL 2 {}
null-float NULL 4.0 {}
null-text NULL 'a' {}
null-null NULL NULL {}
} {
do_execsql_test shift-left-$testname "SELECT $lhs << $rhs" $::ans
}
# >> mirrors the << cases: integer left operand, negative counts shift
# left instead, and |count| >= 64 produces 0.
foreach {testname lhs rhs ans} {
int-int 8 2 2
int-neg_int 8 -2 32
int-float 8 1.0 4
int-text 8 'a' 8
int-text_float 8 '3.0' 1
int-text_int 8 '1' 4
int-null 8 NULL {}
int-int-overflow 8 64 0
int-int-underflow 8 -64 0
int-float-overflow 8 64.0 0
int-float-underflow 8 -64.0 0
} {
do_execsql_test shift-right-$testname "SELECT $lhs >> $rhs" $::ans
}
# >> with a float left operand.
foreach {testname lhs rhs ans} {
float-int 8.0 2 2
float-neg_int 8.0 -2 32
float-float 8.0 1.0 4
float-text 8.0 'a' 8
float-text_float 8.0 '3.0' 1
float-text_int 8.0 '1' 4
float-null 8.0 NULL {}
float-int-overflow 8.0 64 0
float-int-underflow 8.0 -64 0
float-float-overflow 8.0 64.0 0
float-float-underflow 8.0 -64.0 0
} {
do_execsql_test shift-right-$testname "SELECT $lhs >> $rhs" $::ans
}
# >> with text operands.
foreach {testname lhs rhs ans} {
text-int 'a' 2 0
text-float 'a' 4.0 0
text-text 'a' 'a' 0
text_int-text_int '8' '1' 4
text_int-text_float '8' '3.0' 1
text_int-text '8' 'a' 8
text_float-text_int '8.0' '1' 4
text_float-text_float '8.0' '3.0' 1
text_float-text '8.0' 'a' 8
text-null '8' NULL {}
} {
do_execsql_test shift-right-$testname "SELECT $lhs >> $rhs" $::ans
}
# >> with a NULL left operand is always NULL.
foreach {testname lhs rhs ans} {
null-int NULL 2 {}
null-float NULL 4.0 {}
null-text NULL 'a' {}
null-null NULL NULL {}
} {
do_execsql_test shift-right-$testname "SELECT $lhs >> $rhs" $::ans
}
# Bitwise NOT of NULL is NULL.
do_execsql_test bitwise-not-null {
SELECT ~NULL
} {}

View File

@@ -80,6 +80,14 @@ do_execsql_test select_with_quoting_2 {
select "users".`id` from users where `users`.[id] = 5;
} {5}
# rowid is addressable both bare and qualified with the table alias.
do_execsql_test select-rowid {
select rowid, first_name from users u where rowid = 5;
} {5|Edward}
do_execsql_test select-rowid-2 {
select u.rowid, first_name from users u where rowid = 5;
} {5|Edward}
# Equality on the INTEGER PRIMARY KEY (rowid alias) returns the full row.
do_execsql_test seekrowid {
select * from users u where u.id = 5;
} {"5|Edward|Miller|christiankramer@example.com|725-281-1033|08522 English Plain|Lake Keith|ID|23283|15"}

View File

@@ -365,3 +365,21 @@ do_execsql_test nested-parens-conditionals-and-double-or {
8171|Andrea|Lee|dgarrison@example.com|001-594-430-0646|452 Anthony Stravenue|Sandraville|CA|28572|12
9110|Anthony|Barrett|steven05@example.net|(562)928-9177x8454|86166 Foster Inlet Apt. 284|North Jeffreyburgh|CA|80147|97
9279|Annette|Lynn|joanne37@example.com|(272)700-7181|2676 Laura Points Apt. 683|Tristanville|NY|48646|91}}
# Regression test for nested parens + OR + AND. This returned 0 rows before
# the fix. It must always return 1 row: for id = 6 the left conjunct
# reduces to ((id != 5) AND TRUE) OR FALSE, which is true, and the right
# conjunct (id = 6 OR FALSE) selects exactly that row.
do_execsql_test nested-parens-and-inside-or-regression-test {
SELECT count(1) FROM users
WHERE (
(
(
(id != 5)
AND
(id = 5 OR TRUE)
)
OR FALSE
)
AND
(id = 6 OR FALSE)
);
} {1}

View File

@@ -441,7 +441,12 @@ impl Splitter for Tokenizer {
// do not include the '?' in the token
Ok((Some((&data[1..=i], TK_VARIABLE)), i + 1))
}
None => Ok((Some((&data[1..], TK_VARIABLE)), data.len())),
None => {
if !data[1..].is_empty() && data[1..].iter().all(|ch| *ch == b'0') {
return Err(Error::BadVariableName(None, None));
}
Ok((Some((&data[1..], TK_VARIABLE)), data.len()))
}
}
}
b'$' | b'@' | b'#' | b':' => {

View File

@@ -194,6 +194,9 @@ impl CreateTableBody {
{
let mut generated_count = 0;
for c in columns.values() {
if c.col_name == "rowid" {
return Err(custom_err!("cannot use reserved word: ROWID"));
}
for cs in &c.constraints {
if let ColumnConstraint::Generated { .. } = cs.constraint {
generated_count += 1;

View File

@@ -728,6 +728,7 @@ impl ToTokens for Expr {
}
s.append(TK_RP, None)
}
Self::RowId { .. } => Ok(()),
Self::Subquery(query) => {
s.append(TK_LP, None)?;
query.to_tokens(s)?;

View File

@@ -338,6 +338,13 @@ pub enum Expr {
/// is the column a rowid alias
is_rowid_alias: bool,
},
/// `ROWID`
RowId {
/// the x in `x.y.z`. index of the db in catalog.
database: Option<usize>,
/// the y in `x.y.z`. index of the table in catalog.
table: usize,
},
/// `IN`
InList {
/// expression