diff --git a/.github/workflows/antithesis.yml b/.github/workflows/antithesis.yml
index 60d5cff52..bcd767447 100644
--- a/.github/workflows/antithesis.yml
+++ b/.github/workflows/antithesis.yml
@@ -13,7 +13,7 @@ env:
ANTITHESIS_PASSWD: ${{ secrets.ANTITHESIS_PASSWD }}
ANTITHESIS_DOCKER_HOST: us-central1-docker.pkg.dev
ANTITHESIS_DOCKER_REPO: ${{ secrets.ANTITHESIS_DOCKER_REPO }}
- ANTITHESIS_EMAIL: "penberg@turso.tech;pmuniz@turso.tech;pere@turso.tech"
+ ANTITHESIS_EMAIL: ${{ secrets.ANTITHESIS_EMAIL }}
ANTITHESIS_REGISTRY_KEY: ${{ secrets.ANTITHESIS_REGISTRY_KEY }}
jobs:
diff --git a/.github/workflows/long_fuzz_tests_btree.yml b/.github/workflows/long_fuzz_tests_btree.yml
index 49e5a252e..0f38f67bf 100644
--- a/.github/workflows/long_fuzz_tests_btree.yml
+++ b/.github/workflows/long_fuzz_tests_btree.yml
@@ -28,10 +28,6 @@ jobs:
run: cargo test -- --ignored fuzz_long
env:
RUST_BACKTRACE: 1
- - name: Run ignored long tests with index
- run: cargo test -- --ignored fuzz_long
- env:
- RUST_BACKTRACE: 1
simple-stress-test:
runs-on: blacksmith-4vcpu-ubuntu-2404
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 65e19f332..fa7cd6b0d 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -73,19 +73,12 @@ jobs:
with:
prefix-key: "v1-rust" # can be updated if we need to reset caches due to non-trivial change in the dependencies (for example, custom env var were set for single workspace project)
- name: Install the project
- run: ./scripts/run-sim --iterations 50
+ run: ./scripts/run-sim --maximum-tests 2000 loop -n 50 -s
test-limbo:
runs-on: blacksmith-4vcpu-ubuntu-2404
timeout-minutes: 20
steps:
- - name: Install cargo-c
- env:
- LINK: https://github.com/lu-zero/cargo-c/releases/download/v0.10.7
- CARGO_C_FILE: cargo-c-x86_64-unknown-linux-musl.tar.gz
- run: |
- curl -L $LINK/$CARGO_C_FILE | tar xz -C ~/.cargo/bin
-
- uses: actions/checkout@v3
- name: Install uv
@@ -96,9 +89,6 @@ jobs:
- name: Set up Python
run: uv python install
- - name: Install the project
- run: uv sync --all-extras --dev --all-packages
-
- uses: "./.github/shared/install_sqlite"
- name: Test
run: make test
diff --git a/Cargo.lock b/Cargo.lock
index 2e8b0bdfc..d8024a08f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -571,7 +571,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "core_tester"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anyhow",
"assert_cmd",
@@ -1870,14 +1870,14 @@ dependencies = [
[[package]]
name = "limbo-go"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"turso_core",
]
[[package]]
name = "limbo-wasm"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"console_error_panic_hook",
"getrandom 0.2.15",
@@ -1890,7 +1890,7 @@ dependencies = [
[[package]]
name = "limbo_completion"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"mimalloc",
"turso_ext",
@@ -1898,7 +1898,7 @@ dependencies = [
[[package]]
name = "limbo_crypto"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"blake3",
"data-encoding",
@@ -1911,7 +1911,7 @@ dependencies = [
[[package]]
name = "limbo_csv"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"csv",
"mimalloc",
@@ -1921,7 +1921,7 @@ dependencies = [
[[package]]
name = "limbo_ipaddr"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"ipnetwork",
"mimalloc",
@@ -1930,7 +1930,7 @@ dependencies = [
[[package]]
name = "limbo_percentile"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"mimalloc",
"turso_ext",
@@ -1938,7 +1938,7 @@ dependencies = [
[[package]]
name = "limbo_regexp"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"mimalloc",
"regex",
@@ -1947,7 +1947,7 @@ dependencies = [
[[package]]
name = "limbo_sim"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anarchist-readable-name-generator-lib",
"anyhow",
@@ -1973,7 +1973,7 @@ dependencies = [
[[package]]
name = "limbo_sqlite3"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"env_logger 0.11.7",
"libc",
@@ -1986,7 +1986,7 @@ dependencies = [
[[package]]
name = "limbo_sqlite_test_ext"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"cc",
]
@@ -2222,6 +2222,8 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
+ "regex",
+ "semver",
"syn 2.0.100",
]
@@ -2464,45 +2466,6 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
-[[package]]
-name = "phf"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
-dependencies = [
- "phf_shared",
-]
-
-[[package]]
-name = "phf_codegen"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
-dependencies = [
- "phf_generator",
- "phf_shared",
-]
-
-[[package]]
-name = "phf_generator"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
-dependencies = [
- "phf_shared",
- "rand 0.8.5",
-]
-
-[[package]]
-name = "phf_shared"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
-dependencies = [
- "siphasher",
- "uncased",
-]
-
[[package]]
name = "pin-project-lite"
version = "0.2.16"
@@ -2690,7 +2653,7 @@ dependencies = [
[[package]]
name = "py-turso"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anyhow",
"pyo3",
@@ -3285,12 +3248,6 @@ dependencies = [
"libc",
]
-[[package]]
-name = "siphasher"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
-
[[package]]
name = "slab"
version = "0.4.9"
@@ -3302,9 +3259,9 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.14.0"
+version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
name = "socket2"
@@ -3801,7 +3758,7 @@ dependencies = [
[[package]]
name = "turso"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"tempfile",
"thiserror 2.0.12",
@@ -3811,7 +3768,7 @@ dependencies = [
[[package]]
name = "turso-java"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"jni",
"thiserror 2.0.12",
@@ -3820,7 +3777,7 @@ dependencies = [
[[package]]
name = "turso_cli"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anyhow",
"cfg-if",
@@ -3851,7 +3808,7 @@ dependencies = [
[[package]]
name = "turso_core"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"antithesis_sdk",
"bitflags 2.9.0",
@@ -3904,7 +3861,7 @@ dependencies = [
[[package]]
name = "turso_dart"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"flutter_rust_bridge",
"turso_core",
@@ -3912,7 +3869,7 @@ dependencies = [
[[package]]
name = "turso_ext"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"chrono",
"getrandom 0.3.2",
@@ -3921,7 +3878,7 @@ dependencies = [
[[package]]
name = "turso_ext_tests"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"env_logger 0.11.7",
"lazy_static",
@@ -3932,7 +3889,7 @@ dependencies = [
[[package]]
name = "turso_macros"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"proc-macro2",
"quote",
@@ -3941,7 +3898,7 @@ dependencies = [
[[package]]
name = "turso_node"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"napi",
"napi-build",
@@ -3951,7 +3908,7 @@ dependencies = [
[[package]]
name = "turso_sqlite3_parser"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"bitflags 2.9.0",
"cc",
@@ -3961,18 +3918,15 @@ dependencies = [
"log",
"memchr",
"miette",
- "phf",
- "phf_codegen",
- "phf_shared",
"serde",
+ "smallvec",
"strum",
"strum_macros",
- "uncased",
]
[[package]]
name = "turso_stress"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anarchist-readable-name-generator-lib",
"antithesis_sdk",
diff --git a/Cargo.toml b/Cargo.toml
index 00fb19f0c..cca1e3091 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,25 +31,25 @@ members = [
exclude = ["perf/latency/limbo"]
[workspace.package]
-version = "0.1.1"
+version = "0.1.2-pre.2"
authors = ["the Limbo authors"]
edition = "2021"
license = "MIT"
repository = "https://github.com/tursodatabase/turso"
[workspace.dependencies]
-limbo_completion = { path = "extensions/completion", version = "0.1.1" }
-turso_core = { path = "core", version = "0.1.1" }
-limbo_crypto = { path = "extensions/crypto", version = "0.1.1" }
-limbo_csv = { path = "extensions/csv", version = "0.1.1" }
-turso_ext = { path = "extensions/core", version = "0.1.1" }
-turso_ext_tests = { path = "extensions/tests", version = "0.1.1" }
-limbo_ipaddr = { path = "extensions/ipaddr", version = "0.1.1" }
-turso_macros = { path = "macros", version = "0.1.1" }
-limbo_percentile = { path = "extensions/percentile", version = "0.1.1" }
-limbo_regexp = { path = "extensions/regexp", version = "0.1.1" }
-turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.1.1" }
-limbo_uuid = { path = "extensions/uuid", version = "0.1.1" }
+limbo_completion = { path = "extensions/completion", version = "0.1.2-pre.2" }
+turso_core = { path = "core", version = "0.1.2-pre.2" }
+limbo_crypto = { path = "extensions/crypto", version = "0.1.2-pre.2" }
+limbo_csv = { path = "extensions/csv", version = "0.1.2-pre.2" }
+turso_ext = { path = "extensions/core", version = "0.1.2-pre.2" }
+turso_ext_tests = { path = "extensions/tests", version = "0.1.2-pre.2" }
+limbo_ipaddr = { path = "extensions/ipaddr", version = "0.1.2-pre.2" }
+turso_macros = { path = "macros", version = "0.1.2-pre.2" }
+limbo_percentile = { path = "extensions/percentile", version = "0.1.2-pre.2" }
+limbo_regexp = { path = "extensions/regexp", version = "0.1.2-pre.2" }
+turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.1.2-pre.2" }
+limbo_uuid = { path = "extensions/uuid", version = "0.1.2-pre.2" }
strum = { version = "0.26", features = ["derive"] }
strum_macros = "0.26"
serde = "1.0"
diff --git a/Makefile b/Makefile
index 51dbdc052..499007884 100644
--- a/Makefile
+++ b/Makefile
@@ -3,9 +3,11 @@ CURRENT_RUST_VERSION := $(shell rustc -V | sed -E 's/rustc ([0-9]+\.[0-9]+\.[0-9
CURRENT_RUST_TARGET := $(shell rustc -vV | grep host | cut -d ' ' -f 2)
RUSTUP := $(shell command -v rustup 2> /dev/null)
UNAME_S := $(shell uname -s)
+MINIMUM_TCL_VERSION := 8.6
# Executable used to execute the compatibility tests.
SQLITE_EXEC ?= scripts/limbo-sqlite3
+RUST_LOG := off
all: check-rust-version check-wasm-target limbo limbo-wasm
.PHONY: all
@@ -26,6 +28,17 @@ check-rust-version:
fi
.PHONY: check-rust-version
+check-tcl-version:
+ @printf '%s\n' \
+ 'set need "$(MINIMUM_TCL_VERSION)"' \
+ 'set have [info patchlevel]' \
+ 'if {[package vcompare $$have $$need] < 0} {' \
+ ' puts stderr "tclsh $$have found — need $$need+"' \
+ ' exit 1' \
+ '}' \
+ | tclsh
+.PHONY: check-tcl-version
+
check-wasm-target:
@echo "Checking wasm32-wasi target..."
@if ! rustup target list | grep -q "wasm32-wasi (installed)"; then \
@@ -51,27 +64,31 @@ uv-sync:
uv sync --all-packages
.PHONE: uv-sync
-test: limbo uv-sync test-compat test-vector test-sqlite3 test-shell test-extensions test-memory test-write test-update test-constraint test-collate
+uv-sync-test:
+ uv sync --all-extras --dev --package turso_test
+.PHONY: uv-sync-test
+
+test: limbo uv-sync-test test-compat test-vector test-sqlite3 test-shell test-memory test-write test-update test-constraint test-collate test-extensions
.PHONY: test
-test-extensions: limbo uv-sync
- uv run --project limbo_test test-extensions
+test-extensions: limbo uv-sync-test
+ RUST_LOG=$(RUST_LOG) uv run --project limbo_test test-extensions
.PHONY: test-extensions
-test-shell: limbo uv-sync
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-shell
+test-shell: limbo uv-sync-test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-shell
.PHONY: test-shell
-test-compat:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/all.test
+test-compat: check-tcl-version
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/all.test
.PHONY: test-compat
test-vector:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/vector.test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/vector.test
.PHONY: test-vector
test-time:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/time.test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/time.test
.PHONY: test-time
reset-db:
@@ -85,48 +102,48 @@ test-sqlite3: reset-db
.PHONY: test-sqlite3
test-json:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/json.test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/json.test
.PHONY: test-json
-test-memory: limbo uv-sync
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-memory
+test-memory: limbo uv-sync-test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-memory
.PHONY: test-memory
-test-write: limbo uv-sync
+test-write: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-write; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-write; \
else \
echo "Skipping test-write: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-write
-test-update: limbo uv-sync
+test-update: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-update; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-update; \
else \
echo "Skipping test-update: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-update
-test-collate: limbo uv-sync
+test-collate: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-collate; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-collate; \
else \
echo "Skipping test-collate: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-collate
-test-constraint: limbo uv-sync
+test-constraint: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-constraint; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-constraint; \
else \
echo "Skipping test-constraint: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-constraint
-bench-vfs: uv-sync
+bench-vfs: uv-sync-test
cargo build --release
- uv run --project limbo_test bench-vfs "$(SQL)" "$(N)"
+ RUST_LOG=$(RUST_LOG) uv run --project limbo_test bench-vfs "$(SQL)" "$(N)"
clickbench:
./perf/clickbench/benchmark.sh
diff --git a/README.md b/README.md
index eba882bc1..98a13f674 100644
--- a/README.md
+++ b/README.md
@@ -224,9 +224,11 @@ terms or conditions.
Thanks to all the partners of Turso!
+
+
-
+
## Contributors
diff --git a/antithesis-tests/bank-test/first_setup.py b/antithesis-tests/bank-test/first_setup.py
index fcbc33530..64ad06457 100755
--- a/antithesis-tests/bank-test/first_setup.py
+++ b/antithesis-tests/bank-test/first_setup.py
@@ -50,3 +50,5 @@ cur.execute(f"""
INSERT INTO initial_state (num_accts, total)
VALUES ({num_accts}, {total})
""")
+
+con.commit()
diff --git a/antithesis-tests/stress-composer/first_setup.py b/antithesis-tests/stress-composer/first_setup.py
index 9d755a071..45b37466f 100755
--- a/antithesis-tests/stress-composer/first_setup.py
+++ b/antithesis-tests/stress-composer/first_setup.py
@@ -83,4 +83,6 @@ for i in range(tbl_count):
CREATE TABLE tbl_{i} ({cols_str})
""")
+con.commit()
+
print(f"DB Schemas\n------------\n{json.dumps(schemas, indent=2)}")
diff --git a/antithesis-tests/stress-composer/parallel_driver_delete.py b/antithesis-tests/stress-composer/parallel_driver_delete.py
index 4ec62079b..d2e719fec 100755
--- a/antithesis-tests/stress-composer/parallel_driver_delete.py
+++ b/antithesis-tests/stress-composer/parallel_driver_delete.py
@@ -37,6 +37,13 @@ print(f"Attempt to delete {deletions} rows in tbl_{selected_tbl}...")
for i in range(deletions):
where_clause = f"col_{pk} = {generate_random_value(tbl_schema[f'col_{pk}']['data_type'])}"
- cur.execute(f"""
- DELETE FROM tbl_{selected_tbl} WHERE {where_clause}
- """)
+ try:
+ cur.execute(f"""
+ DELETE FROM tbl_{selected_tbl} WHERE {where_clause}
+ """)
+ except turso.OperationalError:
+ con.rollback()
+        # Roll back the failed transaction, then propagate the error
+ raise
+
+con.commit()
diff --git a/antithesis-tests/stress-composer/parallel_driver_insert.py b/antithesis-tests/stress-composer/parallel_driver_insert.py
index 8e4f73e1f..bb5a02170 100755
--- a/antithesis-tests/stress-composer/parallel_driver_insert.py
+++ b/antithesis-tests/stress-composer/parallel_driver_insert.py
@@ -44,5 +44,8 @@ for i in range(insertions):
# Ignore UNIQUE constraint violations
pass
else:
+ con.rollback()
# Re-raise other operational errors
raise
+
+con.commit()
diff --git a/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py b/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py
index d101fcfc5..594925797 100755
--- a/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py
+++ b/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py
@@ -17,8 +17,7 @@ cur_init = con_init.cursor()
tbl_len = cur_init.execute("SELECT count FROM tables").fetchone()[0]
selected_tbl = get_random() % tbl_len
-tbl_schema = json.loads(cur_init.execute(
- f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
+tbl_schema = json.loads(cur_init.execute(f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
tbl_name = f"tbl_{selected_tbl}"
@@ -29,8 +28,7 @@ except Exception as e:
exit(0)
cur = con.cursor()
-cur.execute(
- "SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
+cur.execute("SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
result = cur.fetchone()
@@ -47,10 +45,8 @@ cur.execute("ALTER TABLE " + tbl_name + " RENAME TO " + tbl_name + "_old")
con.rollback()
cur = con.cursor()
-cur.execute(
- "SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
+cur.execute("SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
schema_after = cur.fetchone()[0]
-always(schema_before == schema_after,
- "schema should be the same after rollback", {})
+always(schema_before == schema_after, "schema should be the same after rollback", {})
diff --git a/antithesis-tests/stress-composer/parallel_driver_update.py b/antithesis-tests/stress-composer/parallel_driver_update.py
index e30d53acd..101508cc2 100755
--- a/antithesis-tests/stress-composer/parallel_driver_update.py
+++ b/antithesis-tests/stress-composer/parallel_driver_update.py
@@ -58,5 +58,8 @@ for i in range(updates):
# Ignore UNIQUE constraint violations
pass
else:
+ con.rollback()
# Re-raise other operational errors
raise
+
+con.commit()
diff --git a/assets/turso-nyrkio.png b/assets/turso-nyrkio.png
new file mode 100644
index 000000000..e65fcd7f5
Binary files /dev/null and b/assets/turso-nyrkio.png differ
diff --git a/bindings/go/rs_src/rows.rs b/bindings/go/rs_src/rows.rs
index 0e7e1bfbc..98739e83a 100644
--- a/bindings/go/rs_src/rows.rs
+++ b/bindings/go/rs_src/rows.rs
@@ -7,7 +7,7 @@ use turso_core::{LimboError, Statement, StepResult, Value};
pub struct LimboRows<'conn> {
stmt: Box,
- conn: &'conn mut LimboConn,
+ _conn: &'conn mut LimboConn,
err: Option,
}
@@ -15,7 +15,7 @@ impl<'conn> LimboRows<'conn> {
pub fn new(stmt: Statement, conn: &'conn mut LimboConn) -> Self {
LimboRows {
stmt: Box::new(stmt),
- conn,
+ _conn: conn,
err: None,
}
}
@@ -55,8 +55,12 @@ pub extern "C" fn rows_next(ctx: *mut c_void) -> ResultCode {
Ok(StepResult::Row) => ResultCode::Row,
Ok(StepResult::Done) => ResultCode::Done,
Ok(StepResult::IO) => {
- let _ = ctx.conn.io.run_once();
- ResultCode::Io
+ let res = ctx.stmt.run_once();
+ if res.is_err() {
+ ResultCode::Error
+ } else {
+ ResultCode::Io
+ }
}
Ok(StepResult::Busy) => ResultCode::Busy,
Ok(StepResult::Interrupt) => ResultCode::Interrupt,
diff --git a/bindings/go/rs_src/statement.rs b/bindings/go/rs_src/statement.rs
index 970ecd7cf..e1b5ae26b 100644
--- a/bindings/go/rs_src/statement.rs
+++ b/bindings/go/rs_src/statement.rs
@@ -64,7 +64,10 @@ pub extern "C" fn stmt_execute(
return ResultCode::Done;
}
Ok(StepResult::IO) => {
- let _ = stmt.conn.io.run_once();
+ let res = statement.run_once();
+ if res.is_err() {
+ return ResultCode::Error;
+ }
}
Ok(StepResult::Busy) => {
return ResultCode::Busy;
diff --git a/bindings/java/rs_src/turso_connection.rs b/bindings/java/rs_src/turso_connection.rs
index 1d2ae9f10..8a55bf169 100644
--- a/bindings/java/rs_src/turso_connection.rs
+++ b/bindings/java/rs_src/turso_connection.rs
@@ -13,12 +13,12 @@ use turso_core::Connection;
#[derive(Clone)]
pub struct TursoConnection {
pub(crate) conn: Arc,
- pub(crate) io: Arc,
+ pub(crate) _io: Arc,
}
impl TursoConnection {
pub fn new(conn: Arc, io: Arc) -> Self {
- TursoConnection { conn, io }
+ TursoConnection { conn, _io: io }
}
#[allow(clippy::wrong_self_convention)]
diff --git a/bindings/java/rs_src/turso_statement.rs b/bindings/java/rs_src/turso_statement.rs
index 17eaa5a5b..444d34707 100644
--- a/bindings/java/rs_src/turso_statement.rs
+++ b/bindings/java/rs_src/turso_statement.rs
@@ -76,7 +76,7 @@ pub extern "system" fn Java_tech_turso_core_TursoStatement_step<'local>(
};
}
StepResult::IO => {
- if let Err(e) = stmt.connection.io.run_once() {
+ if let Err(e) = stmt.stmt.run_once() {
set_err_msg_and_throw_exception(&mut env, obj, TURSO_ETC, e.to_string());
return to_turso_step_result(&mut env, STEP_RESULT_ID_ERROR, None);
}
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java
index 88c76dd85..6841a5cbc 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java
@@ -24,7 +24,9 @@ public final class JDBC4Connection implements Connection {
}
public TursoStatement prepare(String sql) throws SQLException {
- return connection.prepare(sql);
+ final TursoStatement statement = connection.prepare(sql);
+ statement.initializeColumnMetadata();
+ return statement;
}
@Override
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java
index e947aa272..a3f8b3d4d 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java
@@ -34,7 +34,6 @@ public final class JDBC4PreparedStatement extends JDBC4Statement implements Prep
super(connection);
this.sql = sql;
this.statement = connection.prepare(sql);
- this.statement.initializeColumnMetadata();
this.resultSet = new JDBC4ResultSet(this.statement.getResultSet());
}
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java
index 23421bc51..85dee794d 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java
@@ -319,10 +319,8 @@ public final class JDBC4ResultSet implements ResultSet, ResultSetMetaData {
}
@Override
- @SkipNullableCheck
public Object getObject(int columnIndex) throws SQLException {
- // TODO
- return null;
+ return resultSet.get(columnIndex);
}
@Override
@@ -1226,20 +1224,22 @@ public final class JDBC4ResultSet implements ResultSet, ResultSetMetaData {
@Override
public int getColumnDisplaySize(int column) throws SQLException {
- // TODO
- return 0;
+ return Integer.MAX_VALUE;
}
@Override
public String getColumnLabel(int column) throws SQLException {
- // TODO
- return "";
+ // TODO: should consider "AS" keyword
+ return getColumnName(column);
}
@Override
public String getColumnName(int column) throws SQLException {
- // TODO
- return "";
+ if (column > 0 && column <= resultSet.getColumnNames().length) {
+ return resultSet.getColumnNames()[column - 1];
+ }
+
+ throw new SQLException("Index out of bound: " + column);
}
@Override
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java
index 02831dbdd..b86b838f5 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java
@@ -18,6 +18,7 @@ public class JDBC4Statement implements Statement {
private final JDBC4Connection connection;
@Nullable protected TursoStatement statement = null;
+ protected long updateCount;
// Because JDBC4Statement has different life cycle in compared to tursoStatement, let's use this
// field to manage JDBC4Statement lifecycle
@@ -173,8 +174,10 @@ public class JDBC4Statement implements Statement {
// TODO: if sql is a readOnly query, do we still need the locks?
connectionLock.lock();
statement = connection.prepare(sql);
+ final long previousChanges = statement.totalChanges();
final boolean result = statement.execute();
updateGeneratedKeys();
+ updateCount = statement.totalChanges() - previousChanges;
return result;
} finally {
@@ -186,19 +189,13 @@ public class JDBC4Statement implements Statement {
@Override
public ResultSet getResultSet() throws SQLException {
requireNonNull(statement, "statement is null");
+ ensureOpen();
return new JDBC4ResultSet(statement.getResultSet());
}
@Override
public int getUpdateCount() throws SQLException {
- // TODO
- return 0;
- }
-
- @Override
- public boolean getMoreResults() throws SQLException {
- // TODO
- return false;
+ return (int) updateCount;
}
@Override
@@ -254,9 +251,22 @@ public class JDBC4Statement implements Statement {
return connection;
}
+ @Override
+ public boolean getMoreResults() throws SQLException {
+ return getMoreResults(Statement.CLOSE_CURRENT_RESULT);
+ }
+
@Override
public boolean getMoreResults(int current) throws SQLException {
- // TODO
+ requireNonNull(statement, "statement should not be null");
+
+ if (current != Statement.CLOSE_CURRENT_RESULT) {
+ throw new SQLException("Invalid argument");
+ }
+
+ statement.getResultSet().close();
+ updateCount = -1;
+
return false;
}
diff --git a/bindings/javascript/Cargo.toml b/bindings/javascript/Cargo.toml
index f02a8abe9..7d62ede4c 100644
--- a/bindings/javascript/Cargo.toml
+++ b/bindings/javascript/Cargo.toml
@@ -13,7 +13,7 @@ crate-type = ["cdylib"]
[dependencies]
turso_core = { workspace = true }
napi = { version = "2.16.17", default-features = false, features = ["napi4"] }
-napi-derive = { version = "2.16.13", default-features = false }
+napi-derive = { version = "2.16.13", default-features = true }
[build-dependencies]
napi-build = "2.2.0"
diff --git a/bindings/javascript/__test__/better-sqlite3.spec.mjs b/bindings/javascript/__test__/better-sqlite3.spec.mjs
index a5fdf93fe..992cda585 100644
--- a/bindings/javascript/__test__/better-sqlite3.spec.mjs
+++ b/bindings/javascript/__test__/better-sqlite3.spec.mjs
@@ -32,7 +32,7 @@ const genDatabaseFilename = () => {
return `test-${crypto.randomBytes(8).toString('hex')}.db`;
};
-new DualTest().onlySqlitePasses("opening a read-only database fails if the file doesn't exist", async (t) => {
+new DualTest().both("opening a read-only database fails if the file doesn't exist", async (t) => {
t.throws(() => t.context.connect(genDatabaseFilename(), { readonly: true }),
{
any: true,
@@ -104,7 +104,21 @@ inMemoryTest.both("Empty prepared statement should throw", async (t) => {
() => {
db.prepare("");
},
- { instanceOf: Error },
+ { any: true }
+ );
+});
+
+inMemoryTest.onlySqlitePasses("Empty prepared statement should throw the correct error", async (t) => {
+ // the previous test can be removed once this one passes in Turso
+ const db = t.context.db;
+ t.throws(
+ () => {
+ db.prepare("");
+ },
+ {
+ instanceOf: RangeError,
+ message: "The supplied SQL string contains no statements",
+ },
);
});
@@ -156,9 +170,12 @@ inMemoryTest.both("Statement shouldn't bind twice with bind()", async (t) => {
t.throws(
() => {
- db.bind("Bob");
+ stmt.bind("Bob");
+ },
+ {
+ instanceOf: TypeError,
+ message: 'The bind() method can only be invoked once per statement object',
},
- { instanceOf: Error },
);
});
@@ -372,3 +389,4 @@ inMemoryTest.both("Test Statement.source", async t => {
t.is(stmt.source, sql);
});
+
diff --git a/bindings/javascript/__test__/sync.spec.mjs b/bindings/javascript/__test__/sync.spec.mjs
index f8c016021..87fe1d7d4 100644
--- a/bindings/javascript/__test__/sync.spec.mjs
+++ b/bindings/javascript/__test__/sync.spec.mjs
@@ -377,7 +377,7 @@ dualTest.both("Database.pragma()", async (t) => {
t.deepEqual(db.pragma("cache_size"), [{ "cache_size": 2000 }]);
});
-dualTest.onlySqlitePasses("errors", async (t) => {
+dualTest.both("errors", async (t) => {
const db = t.context.db;
const syntaxError = await t.throws(() => {
@@ -385,7 +385,7 @@ dualTest.onlySqlitePasses("errors", async (t) => {
}, {
any: true,
instanceOf: t.context.errorType,
- message: 'near "SYNTAX": syntax error',
+ message: /near "SYNTAX": syntax error/,
code: 'SQLITE_ERROR'
});
const noTableError = await t.throws(() => {
@@ -393,7 +393,7 @@ dualTest.onlySqlitePasses("errors", async (t) => {
}, {
any: true,
instanceOf: t.context.errorType,
- message: "no such table: missing_table",
+ message: /(Parse error: Table missing_table not found|no such table: missing_table)/,
code: 'SQLITE_ERROR'
});
diff --git a/bindings/javascript/index.d.ts b/bindings/javascript/index.d.ts
index 99433b962..37041f67a 100644
--- a/bindings/javascript/index.d.ts
+++ b/bindings/javascript/index.d.ts
@@ -3,41 +3,41 @@
/* auto-generated by NAPI-RS */
-export interface Options {
- readonly: boolean
- fileMustExist: boolean
- timeout: number
+export interface OpenDatabaseOptions {
+ readonly?: boolean
+ fileMustExist?: boolean
+ timeout?: number
+}
+export interface PragmaOptions {
+ simple: boolean
}
export declare class Database {
memory: boolean
readonly: boolean
- inTransaction: boolean
open: boolean
name: string
- constructor(path: string, options?: Options | undefined | null)
+ constructor(path: string, options?: OpenDatabaseOptions | undefined | null)
prepare(sql: string): Statement
- transaction(): void
- pragma(): void
+ pragma(pragmaName: string, options?: PragmaOptions | undefined | null): unknown
backup(): void
serialize(): void
function(): void
aggregate(): void
table(): void
- loadExtension(): void
+ loadExtension(path: string): void
+ exec(sql: string): void
+ close(): void
}
export declare class Statement {
- database: Database
source: string
- reader: boolean
- readonly: boolean
- busy: boolean
- get(): unknown
- all(): NapiResult
- run(args: Array): void
- static iterate(): void
- static pluck(): void
+ get(args?: Array | undefined | null): unknown
+ run(args?: Array | undefined | null): unknown
+ iterate(args?: Array | undefined | null): IteratorStatement
+ all(args?: Array | undefined | null): unknown
+ pluck(pluck?: boolean | undefined | null): void
static expand(): void
- static raw(): void
+ raw(raw?: boolean | undefined | null): void
static columns(): void
- static bind(): void
+ bind(args?: Array | undefined | null): Statement
}
+export declare class IteratorStatement { }
diff --git a/bindings/javascript/index.js b/bindings/javascript/index.js
index 4e9bf54a7..c1f087ea5 100644
--- a/bindings/javascript/index.js
+++ b/bindings/javascript/index.js
@@ -5,325 +5,313 @@
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
-const { join } = require("path");
+const { join } = require('path')
-const { platform, arch } = process;
+const { platform, arch } = process
-let nativeBinding = null;
-let localFileExisted = false;
-let loadError = null;
+let nativeBinding = null
+let localFileExisted = false
+let loadError = null
function isMusl() {
// For Node 10
- if (!process.report || typeof process.report.getReport !== "function") {
+ if (!process.report || typeof process.report.getReport !== 'function') {
try {
- const lddPath = require("child_process")
- .execSync("which ldd")
- .toString()
- .trim();
- return readFileSync(lddPath, "utf8").includes("musl");
+ const lddPath = require('child_process').execSync('which ldd').toString().trim()
+ return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
- return true;
+ return true
}
} else {
- const { glibcVersionRuntime } = process.report.getReport().header;
- return !glibcVersionRuntime;
+ const { glibcVersionRuntime } = process.report.getReport().header
+ return !glibcVersionRuntime
}
}
switch (platform) {
- case "android":
+ case 'android':
switch (arch) {
- case "arm64":
- localFileExisted = existsSync(
- join(__dirname, "turso.android-arm64.node"),
- );
+ case 'arm64':
+ localFileExisted = existsSync(join(__dirname, 'turso.android-arm64.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.android-arm64.node");
+ nativeBinding = require('./turso.android-arm64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-android-arm64");
+ nativeBinding = require('@tursodatabase/turso-android-arm64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "arm":
- localFileExisted = existsSync(
- join(__dirname, "turso.android-arm-eabi.node"),
- );
+ break
+ case 'arm':
+ localFileExisted = existsSync(join(__dirname, 'turso.android-arm-eabi.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.android-arm-eabi.node");
+ nativeBinding = require('./turso.android-arm-eabi.node')
} else {
- nativeBinding = require("@tursodatabase/turso-android-arm-eabi");
+ nativeBinding = require('@tursodatabase/turso-android-arm-eabi')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on Android ${arch}`);
+ throw new Error(`Unsupported architecture on Android ${arch}`)
}
- break;
- case "win32":
+ break
+ case 'win32':
switch (arch) {
- case "x64":
+ case 'x64':
localFileExisted = existsSync(
- join(__dirname, "turso.win32-x64-msvc.node"),
- );
+ join(__dirname, 'turso.win32-x64-msvc.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.win32-x64-msvc.node");
+ nativeBinding = require('./turso.win32-x64-msvc.node')
} else {
- nativeBinding = require("@tursodatabase/turso-win32-x64-msvc");
+ nativeBinding = require('@tursodatabase/turso-win32-x64-msvc')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "ia32":
+ break
+ case 'ia32':
localFileExisted = existsSync(
- join(__dirname, "turso.win32-ia32-msvc.node"),
- );
+ join(__dirname, 'turso.win32-ia32-msvc.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.win32-ia32-msvc.node");
+ nativeBinding = require('./turso.win32-ia32-msvc.node')
} else {
- nativeBinding = require("@tursodatabase/turso-win32-ia32-msvc");
+ nativeBinding = require('@tursodatabase/turso-win32-ia32-msvc')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "arm64":
+ break
+ case 'arm64':
localFileExisted = existsSync(
- join(__dirname, "turso.win32-arm64-msvc.node"),
- );
+ join(__dirname, 'turso.win32-arm64-msvc.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.win32-arm64-msvc.node");
+ nativeBinding = require('./turso.win32-arm64-msvc.node')
} else {
- nativeBinding = require("@tursodatabase/turso-win32-arm64-msvc");
+ nativeBinding = require('@tursodatabase/turso-win32-arm64-msvc')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on Windows: ${arch}`);
+ throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
- break;
- case "darwin":
- localFileExisted = existsSync(
- join(__dirname, "turso.darwin-universal.node"),
- );
+ break
+ case 'darwin':
+ localFileExisted = existsSync(join(__dirname, 'turso.darwin-universal.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.darwin-universal.node");
+ nativeBinding = require('./turso.darwin-universal.node')
} else {
- nativeBinding = require("@tursodatabase/turso-darwin-universal");
+ nativeBinding = require('@tursodatabase/turso-darwin-universal')
}
- break;
+ break
} catch {}
switch (arch) {
- case "x64":
- localFileExisted = existsSync(
- join(__dirname, "turso.darwin-x64.node"),
- );
+ case 'x64':
+ localFileExisted = existsSync(join(__dirname, 'turso.darwin-x64.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.darwin-x64.node");
+ nativeBinding = require('./turso.darwin-x64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-darwin-x64");
+ nativeBinding = require('@tursodatabase/turso-darwin-x64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "arm64":
+ break
+ case 'arm64':
localFileExisted = existsSync(
- join(__dirname, "turso.darwin-arm64.node"),
- );
+ join(__dirname, 'turso.darwin-arm64.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.darwin-arm64.node");
+ nativeBinding = require('./turso.darwin-arm64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-darwin-arm64");
+ nativeBinding = require('@tursodatabase/turso-darwin-arm64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on macOS: ${arch}`);
+ throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
- break;
- case "freebsd":
- if (arch !== "x64") {
- throw new Error(`Unsupported architecture on FreeBSD: ${arch}`);
+ break
+ case 'freebsd':
+ if (arch !== 'x64') {
+ throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
- localFileExisted = existsSync(
- join(__dirname, "turso.freebsd-x64.node"),
- );
+ localFileExisted = existsSync(join(__dirname, 'turso.freebsd-x64.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.freebsd-x64.node");
+ nativeBinding = require('./turso.freebsd-x64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-freebsd-x64");
+ nativeBinding = require('@tursodatabase/turso-freebsd-x64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "linux":
+ break
+ case 'linux':
switch (arch) {
- case "x64":
+ case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-x64-musl.node"),
- );
+ join(__dirname, 'turso.linux-x64-musl.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-x64-musl.node");
+ nativeBinding = require('./turso.linux-x64-musl.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-x64-musl");
+ nativeBinding = require('@tursodatabase/turso-linux-x64-musl')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-x64-gnu.node"),
- );
+ join(__dirname, 'turso.linux-x64-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-x64-gnu.node");
+ nativeBinding = require('./turso.linux-x64-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-x64-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-x64-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "arm64":
+ break
+ case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm64-musl.node"),
- );
+ join(__dirname, 'turso.linux-arm64-musl.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm64-musl.node");
+ nativeBinding = require('./turso.linux-arm64-musl.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm64-musl");
+ nativeBinding = require('@tursodatabase/turso-linux-arm64-musl')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm64-gnu.node"),
- );
+ join(__dirname, 'turso.linux-arm64-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm64-gnu.node");
+ nativeBinding = require('./turso.linux-arm64-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm64-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-arm64-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "arm":
+ break
+ case 'arm':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm-musleabihf.node"),
- );
+ join(__dirname, 'turso.linux-arm-musleabihf.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm-musleabihf.node");
+ nativeBinding = require('./turso.linux-arm-musleabihf.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm-musleabihf");
+ nativeBinding = require('@tursodatabase/turso-linux-arm-musleabihf')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm-gnueabihf.node"),
- );
+ join(__dirname, 'turso.linux-arm-gnueabihf.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm-gnueabihf.node");
+ nativeBinding = require('./turso.linux-arm-gnueabihf.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm-gnueabihf");
+ nativeBinding = require('@tursodatabase/turso-linux-arm-gnueabihf')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "riscv64":
+ break
+ case 'riscv64':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-riscv64-musl.node"),
- );
+ join(__dirname, 'turso.linux-riscv64-musl.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-riscv64-musl.node");
+ nativeBinding = require('./turso.linux-riscv64-musl.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-riscv64-musl");
+ nativeBinding = require('@tursodatabase/turso-linux-riscv64-musl')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-riscv64-gnu.node"),
- );
+ join(__dirname, 'turso.linux-riscv64-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-riscv64-gnu.node");
+ nativeBinding = require('./turso.linux-riscv64-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-riscv64-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-riscv64-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "s390x":
+ break
+ case 's390x':
localFileExisted = existsSync(
- join(__dirname, "turso.linux-s390x-gnu.node"),
- );
+ join(__dirname, 'turso.linux-s390x-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-s390x-gnu.node");
+ nativeBinding = require('./turso.linux-s390x-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-s390x-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-s390x-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on Linux: ${arch}`);
+ throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
- break;
+ break
default:
- throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`);
+ throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
if (!nativeBinding) {
if (loadError) {
- throw loadError;
+ throw loadError
}
- throw new Error(`Failed to load native binding`);
+ throw new Error(`Failed to load native binding`)
}
-const { Database, Statement } = nativeBinding;
+const { Database, Statement, IteratorStatement } = nativeBinding
-module.exports.Database = Database;
-module.exports.Statement = Statement;
+module.exports.Database = Database
+module.exports.Statement = Statement
+module.exports.IteratorStatement = IteratorStatement
diff --git a/bindings/javascript/npm/darwin-universal/package.json b/bindings/javascript/npm/darwin-universal/package.json
index c5047c2bb..d0d67e532 100644
--- a/bindings/javascript/npm/darwin-universal/package.json
+++ b/bindings/javascript/npm/darwin-universal/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso-darwin-universal",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
diff --git a/bindings/javascript/npm/linux-x64-gnu/package.json b/bindings/javascript/npm/linux-x64-gnu/package.json
index aa3d65f33..41f793ddb 100644
--- a/bindings/javascript/npm/linux-x64-gnu/package.json
+++ b/bindings/javascript/npm/linux-x64-gnu/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso-linux-x64-gnu",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
diff --git a/bindings/javascript/npm/win32-x64-msvc/package.json b/bindings/javascript/npm/win32-x64-msvc/package.json
index 0b4bac4cb..f5339ea01 100644
--- a/bindings/javascript/npm/win32-x64-msvc/package.json
+++ b/bindings/javascript/npm/win32-x64-msvc/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso-win32-x64-msvc",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
diff --git a/bindings/javascript/package-lock.json b/bindings/javascript/package-lock.json
index 98014d52b..93b39fe9c 100644
--- a/bindings/javascript/package-lock.json
+++ b/bindings/javascript/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "@tursodatabase/turso",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@tursodatabase/turso",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"license": "MIT",
"devDependencies": {
"@napi-rs/cli": "^2.18.4",
diff --git a/bindings/javascript/package.json b/bindings/javascript/package.json
index 809015934..fbacd7543 100644
--- a/bindings/javascript/package.json
+++ b/bindings/javascript/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
@@ -42,4 +42,4 @@
"version": "napi version"
},
"packageManager": "yarn@4.6.0"
-}
+}
\ No newline at end of file
diff --git a/bindings/javascript/sqlite-error.js b/bindings/javascript/sqlite-error.js
new file mode 100644
index 000000000..82356bc36
--- /dev/null
+++ b/bindings/javascript/sqlite-error.js
@@ -0,0 +1,22 @@
+'use strict';
+const descriptor = { value: 'SqliteError', writable: true, enumerable: false, configurable: true };
+
+function SqliteError(message, code, rawCode) {
+ if (new.target !== SqliteError) {
+ return new SqliteError(message, code);
+ }
+ if (typeof code !== 'string') {
+ throw new TypeError('Expected second argument to be a string');
+ }
+ Error.call(this, message);
+ descriptor.value = '' + message;
+ Object.defineProperty(this, 'message', descriptor);
+ Error.captureStackTrace(this, SqliteError);
+ this.code = code;
+ this.rawCode = rawCode
+}
+Object.setPrototypeOf(SqliteError, Error);
+Object.setPrototypeOf(SqliteError.prototype, Error.prototype);
+Object.defineProperty(SqliteError.prototype, 'name', descriptor);
+module.exports = SqliteError;
+
diff --git a/bindings/javascript/src/lib.rs b/bindings/javascript/src/lib.rs
index 248c240b4..15c32940f 100644
--- a/bindings/javascript/src/lib.rs
+++ b/bindings/javascript/src/lib.rs
@@ -14,12 +14,18 @@ use turso_core::{LimboError, StepResult};
#[derive(Default)]
#[napi(object)]
pub struct OpenDatabaseOptions {
- pub readonly: bool,
- pub file_must_exist: bool,
- pub timeout: u32,
+ pub readonly: Option,
+ pub file_must_exist: Option,
+ pub timeout: Option,
// verbose => Callback,
}
+impl OpenDatabaseOptions {
+ fn readonly(&self) -> bool {
+ self.readonly.unwrap_or(false)
+ }
+}
+
#[napi(object)]
pub struct PragmaOptions {
pub simple: bool,
@@ -41,7 +47,7 @@ pub struct Database {
pub name: String,
_db: Arc,
conn: Arc,
- io: Arc,
+ _io: Arc,
}
impl ObjectFinalize for Database {
@@ -55,34 +61,36 @@ impl ObjectFinalize for Database {
#[napi]
impl Database {
#[napi(constructor)]
- pub fn new(path: String, options: Option) -> napi::Result {
+ pub fn new(path: String, options: Option) -> napi::Result {
let memory = path == ":memory:";
let io: Arc = if memory {
Arc::new(turso_core::MemoryIO::new())
} else {
- Arc::new(turso_core::PlatformIO::new().map_err(into_napi_error)?)
+ Arc::new(turso_core::PlatformIO::new().map_err(into_napi_sqlite_error)?)
};
let opts = options.unwrap_or_default();
- let flag = if opts.readonly {
+ let flag = if opts.readonly() {
turso_core::OpenFlags::ReadOnly
} else {
turso_core::OpenFlags::Create
};
- let file = io.open_file(&path, flag, false).map_err(into_napi_error)?;
+ let file = io
+ .open_file(&path, flag, false)
+ .map_err(|err| into_napi_error_with_message("SQLITE_CANTOPEN".to_owned(), err))?;
let db_file = Arc::new(DatabaseFile::new(file));
let db = turso_core::Database::open(io.clone(), &path, db_file, false, false)
- .map_err(into_napi_error)?;
- let conn = db.connect().map_err(into_napi_error)?;
+ .map_err(into_napi_sqlite_error)?;
+ let conn = db.connect().map_err(into_napi_sqlite_error)?;
Ok(Self {
- readonly: opts.readonly,
+ readonly: opts.readonly(),
memory,
_db: db,
conn,
open: true,
name: path,
- io,
+ _io: io,
})
}
@@ -114,7 +122,7 @@ impl Database {
return Ok(env.get_undefined()?.into_unknown())
}
turso_core::StepResult::IO => {
- self.io.run_once().map_err(into_napi_error)?;
+ stmt.run_once().map_err(into_napi_error)?;
continue;
}
step @ turso_core::StepResult::Interrupt
@@ -131,16 +139,6 @@ impl Database {
}
}
- #[napi]
- pub fn readonly(&self) -> bool {
- self.readonly
- }
-
- #[napi]
- pub fn open(&self) -> bool {
- self.open
- }
-
#[napi]
pub fn backup(&self) {
todo!()
@@ -176,7 +174,7 @@ impl Database {
}
#[napi]
- pub fn exec(&self, sql: String) -> napi::Result<()> {
+ pub fn exec(&self, sql: String) -> napi::Result<(), String> {
let query_runner = self.conn.query_runner(sql.as_bytes());
// Since exec doesn't return any values, we can just iterate over the results
@@ -185,17 +183,17 @@ impl Database {
Ok(Some(mut stmt)) => loop {
match stmt.step() {
Ok(StepResult::Row) => continue,
- Ok(StepResult::IO) => self.io.run_once().map_err(into_napi_error)?,
+ Ok(StepResult::IO) => stmt.run_once().map_err(into_napi_sqlite_error)?,
Ok(StepResult::Done) => break,
Ok(StepResult::Interrupt | StepResult::Busy) => {
return Err(napi::Error::new(
- napi::Status::GenericFailure,
+ "SQLITE_ERROR".to_owned(),
"Statement execution interrupted or busy".to_string(),
));
}
Err(err) => {
return Err(napi::Error::new(
- napi::Status::GenericFailure,
+ "SQLITE_ERROR".to_owned(),
format!("Error executing SQL: {}", err),
));
}
@@ -204,7 +202,7 @@ impl Database {
Ok(None) => continue,
Err(err) => {
return Err(napi::Error::new(
- napi::Status::GenericFailure,
+ "SQLITE_ERROR".to_owned(),
format!("Error executing SQL: {}", err),
));
}
@@ -263,7 +261,7 @@ impl Statement {
#[napi]
pub fn get(&self, env: Env, args: Option>) -> napi::Result {
- let mut stmt = self.check_and_bind(args)?;
+ let mut stmt = self.check_and_bind(env, args)?;
loop {
let step = stmt.step().map_err(into_napi_error)?;
@@ -308,7 +306,7 @@ impl Statement {
}
turso_core::StepResult::Done => return Ok(env.get_undefined()?.into_unknown()),
turso_core::StepResult::IO => {
- self.database.io.run_once().map_err(into_napi_error)?;
+ stmt.run_once().map_err(into_napi_error)?;
continue;
}
turso_core::StepResult::Interrupt | turso_core::StepResult::Busy => {
@@ -324,7 +322,7 @@ impl Statement {
// TODO: Return Info object (https://github.com/WiseLibs/better-sqlite3/blob/master/docs/api.md#runbindparameters---object)
#[napi]
pub fn run(&self, env: Env, args: Option>) -> napi::Result {
- let stmt = self.check_and_bind(args)?;
+ let stmt = self.check_and_bind(env, args)?;
self.internal_all(env, stmt)
}
@@ -335,10 +333,15 @@ impl Statement {
env: Env,
args: Option>,
) -> napi::Result {
- self.check_and_bind(args)?;
+ if let Some(some_args) = args.as_ref() {
+ if some_args.iter().len() != 0 {
+ self.check_and_bind(env, args)?;
+ }
+ }
+
Ok(IteratorStatement {
stmt: Rc::clone(&self.inner),
- database: self.database.clone(),
+ _database: self.database.clone(),
env,
presentation_mode: self.presentation_mode.clone(),
})
@@ -346,7 +349,7 @@ impl Statement {
#[napi]
pub fn all(&self, env: Env, args: Option>) -> napi::Result {
- let stmt = self.check_and_bind(args)?;
+ let stmt = self.check_and_bind(env, args)?;
self.internal_all(env, stmt)
}
@@ -401,7 +404,7 @@ impl Statement {
break;
}
turso_core::StepResult::IO => {
- self.database.io.run_once().map_err(into_napi_error)?;
+ stmt.run_once().map_err(into_napi_error)?;
}
turso_core::StepResult::Interrupt | turso_core::StepResult::Busy => {
return Err(napi::Error::new(
@@ -444,8 +447,9 @@ impl Statement {
}
#[napi]
- pub fn bind(&mut self, args: Option>) -> napi::Result {
- self.check_and_bind(args)?;
+ pub fn bind(&mut self, env: Env, args: Option>) -> napi::Result {
+ self.check_and_bind(env, args)
+ .map_err(with_sqlite_error_message)?;
self.binded = true;
Ok(self.clone())
@@ -455,16 +459,22 @@ impl Statement {
/// and bind values do variables. The expected type for args is `Option>`
fn check_and_bind(
&self,
+ env: Env,
args: Option>,
) -> napi::Result> {
let mut stmt = self.inner.borrow_mut();
stmt.reset();
if let Some(args) = args {
if self.binded {
- return Err(napi::Error::new(
- napi::Status::InvalidArg,
- "This statement already has bound parameters",
- ));
+ let err = napi::Error::new(
+ into_convertible_type_error_message("TypeError"),
+ "The bind() method can only be invoked once per statement object",
+ );
+ unsafe {
+ napi::JsTypeError::from(err).throw_into(env.raw());
+ }
+
+ return Err(napi::Error::from_status(napi::Status::PendingException));
}
for (i, elem) in args.into_iter().enumerate() {
@@ -480,7 +490,7 @@ impl Statement {
#[napi(iterator)]
pub struct IteratorStatement {
stmt: Rc>,
- database: Database,
+ _database: Database,
env: Env,
presentation_mode: PresentationMode,
}
@@ -528,7 +538,7 @@ impl Generator for IteratorStatement {
}
turso_core::StepResult::Done => return None,
turso_core::StepResult::IO => {
- self.database.io.run_once().ok()?;
+ stmt.run_once().ok()?;
continue;
}
turso_core::StepResult::Interrupt | turso_core::StepResult::Busy => return None,
@@ -630,6 +640,29 @@ impl turso_core::DatabaseStorage for DatabaseFile {
}
#[inline]
-pub fn into_napi_error(limbo_error: LimboError) -> napi::Error {
+fn into_napi_error(limbo_error: LimboError) -> napi::Error {
napi::Error::new(napi::Status::GenericFailure, format!("{limbo_error}"))
}
+
+#[inline]
+fn into_napi_sqlite_error(limbo_error: LimboError) -> napi::Error {
+ napi::Error::new(String::from("SQLITE_ERROR"), format!("{limbo_error}"))
+}
+
+#[inline]
+fn into_napi_error_with_message(
+ error_code: String,
+ limbo_error: LimboError,
+) -> napi::Error {
+ napi::Error::new(error_code, format!("{limbo_error}"))
+}
+
+#[inline]
+fn with_sqlite_error_message(err: napi::Error) -> napi::Error {
+ napi::Error::new("SQLITE_ERROR".to_owned(), err.reason)
+}
+
+#[inline]
+fn into_convertible_type_error_message(error_type: &str) -> String {
+ "[TURSO_CONVERT_TYPE]".to_owned() + error_type
+}
diff --git a/bindings/javascript/wrapper.js b/bindings/javascript/wrapper.js
index c42e1246d..0d4c53c96 100644
--- a/bindings/javascript/wrapper.js
+++ b/bindings/javascript/wrapper.js
@@ -2,6 +2,28 @@
const { Database: NativeDB } = require("./index.js");
+const SqliteError = require("./sqlite-error.js");
+
+const convertibleErrorTypes = { TypeError };
+const CONVERTIBLE_ERROR_PREFIX = '[TURSO_CONVERT_TYPE]';
+
+function convertError(err) {
+ if ((err.code ?? '').startsWith(CONVERTIBLE_ERROR_PREFIX)) {
+ return createErrorByName(err.code.substring(CONVERTIBLE_ERROR_PREFIX.length), err.message);
+ }
+
+ return new SqliteError(err.message, err.code, err.rawCode);
+}
+
+function createErrorByName(name, message) {
+ const ErrorConstructor = convertibleErrorTypes[name];
+ if (!ErrorConstructor) {
+ throw new Error(`unknown error type ${name} from Turso`);
+ }
+
+ return new ErrorConstructor(message);
+}
+
/**
* Database represents a connection that can prepare and execute SQL statements.
*/
@@ -145,7 +167,11 @@ class Database {
* @param {string} sql - The SQL statement string to execute.
*/
exec(sql) {
- this.db.exec(sql);
+ try {
+ this.db.exec(sql);
+ } catch (err) {
+ throw convertError(err);
+ }
}
/**
@@ -264,8 +290,13 @@ class Statement {
* @returns this - Statement with binded parameters
*/
bind(...bindParameters) {
- return this.stmt.bind(bindParameters.flat());
+ try {
+ return new Statement(this.stmt.bind(bindParameters.flat()), this.db);
+ } catch (err) {
+ throw convertError(err);
+ }
}
}
module.exports = Database;
+module.exports.SqliteError = SqliteError;
diff --git a/bindings/python/src/lib.rs b/bindings/python/src/lib.rs
index 83adf54c3..61693fb51 100644
--- a/bindings/python/src/lib.rs
+++ b/bindings/python/src/lib.rs
@@ -93,17 +93,24 @@ impl Cursor {
Ok::<(), anyhow::Error>(())
})?;
+ if stmt_is_dml && self.conn.conn.get_auto_commit() {
+ self.conn.conn.execute("BEGIN").map_err(|e| {
+ PyErr::new::(format!(
+ "Failed to start transaction after DDL: {:?}",
+ e
+ ))
+ })?;
+ }
+
// For DDL and DML statements,
// we need to execute the statement immediately
if stmt_is_ddl || stmt_is_dml || stmt_is_tx {
+ let mut stmt = stmt.borrow_mut();
while let turso_core::StepResult::IO = stmt
- .borrow_mut()
.step()
.map_err(|e| PyErr::new::(format!("Step error: {:?}", e)))?
{
- self.conn
- .io
- .run_once()
+ stmt.run_once()
.map_err(|e| PyErr::new::(format!("IO error: {:?}", e)))?;
}
}
@@ -132,7 +139,7 @@ impl Cursor {
return Ok(Some(py_row));
}
turso_core::StepResult::IO => {
- self.conn.io.run_once().map_err(|e| {
+ stmt.run_once().map_err(|e| {
PyErr::new::(format!("IO error: {:?}", e))
})?;
}
@@ -168,7 +175,7 @@ impl Cursor {
results.push(py_row);
}
turso_core::StepResult::IO => {
- self.conn.io.run_once().map_err(|e| {
+ stmt.run_once().map_err(|e| {
PyErr::new::(format!("IO error: {:?}", e))
})?;
}
@@ -233,7 +240,7 @@ fn stmt_is_tx(sql: &str) -> bool {
#[derive(Clone)]
pub struct Connection {
conn: Arc,
- io: Arc,
+ _io: Arc,
}
#[pymethods]
@@ -298,9 +305,11 @@ impl Connection {
impl Drop for Connection {
fn drop(&mut self) {
- self.conn
- .close()
- .expect("Failed to drop (close) connection");
+ if Arc::strong_count(&self.conn) == 1 {
+ self.conn
+ .close()
+ .expect("Failed to drop (close) connection");
+ }
}
}
@@ -308,7 +317,7 @@ impl Drop for Connection {
#[pyfunction]
pub fn connect(path: &str) -> Result {
match turso_core::Connection::from_uri(path, false, false) {
- Ok((io, conn)) => Ok(Connection { conn, io }),
+ Ok((io, conn)) => Ok(Connection { conn, _io: io }),
Err(e) => Err(PyErr::new::(format!(
"Failed to create connection: {:?}",
e
diff --git a/bindings/python/tests/test_database.py b/bindings/python/tests/test_database.py
index c9e1209dd..78c6987d0 100644
--- a/bindings/python/tests/test_database.py
+++ b/bindings/python/tests/test_database.py
@@ -158,6 +158,25 @@ def test_commit(provider):
assert record
+# Test case for: https://github.com/tursodatabase/turso/issues/2002
+@pytest.mark.parametrize("provider", ["sqlite3", "turso"])
+def test_first_rollback(provider, tmp_path):
+ db_file = tmp_path / "test_first_rollback.db"
+
+ conn = connect(provider, str(db_file))
+ cur = conn.cursor()
+ cur.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT)")
+ cur.execute("INSERT INTO users VALUES (1, 'alice')")
+ cur.execute("INSERT INTO users VALUES (2, 'bob')")
+
+ conn.rollback()
+
+ cur.execute("SELECT * FROM users")
+ users = cur.fetchall()
+
+ assert users == []
+ conn.close()
+
@pytest.mark.parametrize("provider", ["sqlite3", "turso"])
def test_with_statement(provider):
with connect(provider, "tests/database.db") as conn:
diff --git a/bindings/wasm/package-lock.json b/bindings/wasm/package-lock.json
index 93ef4f5fd..4ddc87922 100644
--- a/bindings/wasm/package-lock.json
+++ b/bindings/wasm/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "limbo-wasm",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "limbo-wasm",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"license": "MIT",
"devDependencies": {
"@playwright/test": "^1.49.1",
diff --git a/bindings/wasm/package.json b/bindings/wasm/package.json
index fa71211c2..463313751 100644
--- a/bindings/wasm/package.json
+++ b/bindings/wasm/package.json
@@ -3,7 +3,7 @@
"collaborators": [
"the Limbo authors"
],
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"license": "MIT",
"repository": {
"type": "git",
diff --git a/cli/app.rs b/cli/app.rs
index a5a65e138..8136f16a5 100644
--- a/cli/app.rs
+++ b/cli/app.rs
@@ -24,6 +24,7 @@ use std::{
},
time::{Duration, Instant},
};
+use tracing::level_filters::LevelFilter;
use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
use turso_core::{Connection, Database, LimboError, OpenFlags, Statement, StepResult, Value};
@@ -95,7 +96,7 @@ macro_rules! query_internal {
$body(row)?;
}
StepResult::IO => {
- $self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
@@ -175,7 +176,6 @@ impl Limbo {
pub fn with_readline(mut self, mut rl: Editor) -> Self {
let h = LimboHelper::new(
self.conn.clone(),
- self.io.clone(),
self.config.as_ref().map(|c| c.highlight.clone()),
);
rl.set_helper(Some(h));
@@ -644,8 +644,7 @@ impl Limbo {
let _ = self.show_info();
}
Command::Import(args) => {
- let mut import_file =
- ImportFile::new(self.conn.clone(), self.io.clone(), &mut self.writer);
+ let mut import_file = ImportFile::new(self.conn.clone(), &mut self.writer);
import_file.import(args)
}
Command::LoadExtension(args) => {
@@ -740,7 +739,7 @@ impl Limbo {
}
Ok(StepResult::IO) => {
let start = Instant::now();
- self.io.run_once()?;
+ rows.run_once()?;
if let Some(ref mut stats) = statistics {
stats.io_time_elapsed_samples.push(start.elapsed());
}
@@ -833,7 +832,7 @@ impl Limbo {
}
Ok(StepResult::IO) => {
let start = Instant::now();
- self.io.run_once()?;
+ rows.run_once()?;
if let Some(ref mut stats) = statistics {
stats.io_time_elapsed_samples.push(start.elapsed());
}
@@ -908,7 +907,12 @@ impl Limbo {
.with_thread_ids(true)
.with_ansi(should_emit_ansi),
)
- .with(EnvFilter::from_default_env().add_directive("rustyline=off".parse().unwrap()))
+ .with(
+ EnvFilter::builder()
+ .with_default_directive(LevelFilter::OFF.into())
+ .from_env_lossy()
+ .add_directive("rustyline=off".parse().unwrap()),
+ )
.try_init()
{
println!("Unable to setup tracing appender: {:?}", e);
@@ -940,7 +944,7 @@ impl Limbo {
}
}
StepResult::IO => {
- self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
@@ -996,7 +1000,7 @@ impl Limbo {
}
}
StepResult::IO => {
- self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
@@ -1047,7 +1051,7 @@ impl Limbo {
}
}
StepResult::IO => {
- self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
diff --git a/cli/commands/import.rs b/cli/commands/import.rs
index eee0b57d1..536dbcb24 100644
--- a/cli/commands/import.rs
+++ b/cli/commands/import.rs
@@ -21,17 +21,12 @@ pub struct ImportArgs {
pub struct ImportFile<'a> {
conn: Arc,
- io: Arc,
writer: &'a mut dyn Write,
}
impl<'a> ImportFile<'a> {
- pub fn new(
- conn: Arc,
- io: Arc,
- writer: &'a mut dyn Write,
- ) -> Self {
- Self { conn, io, writer }
+ pub fn new(conn: Arc, writer: &'a mut dyn Write) -> Self {
+ Self { conn, writer }
}
pub fn import(&mut self, args: ImportArgs) {
@@ -79,7 +74,7 @@ impl<'a> ImportFile<'a> {
while let Ok(x) = rows.step() {
match x {
turso_core::StepResult::IO => {
- self.io.run_once().unwrap();
+ rows.run_once().unwrap();
}
turso_core::StepResult::Done => break,
turso_core::StepResult::Interrupt => break,
diff --git a/cli/helper.rs b/cli/helper.rs
index 6076e1d0f..aee154662 100644
--- a/cli/helper.rs
+++ b/cli/helper.rs
@@ -40,11 +40,7 @@ pub struct LimboHelper {
}
impl LimboHelper {
- pub fn new(
- conn: Arc,
- io: Arc,
- syntax_config: Option,
- ) -> Self {
+ pub fn new(conn: Arc, syntax_config: Option) -> Self {
// Load only predefined syntax
let ps = from_uncompressed_data(include_bytes!(concat!(
env!("OUT_DIR"),
@@ -59,7 +55,7 @@ impl LimboHelper {
}
}
LimboHelper {
- completer: SqlCompleter::new(conn, io),
+ completer: SqlCompleter::new(conn),
syntax_set: ps,
theme_set: ts,
syntax_config: syntax_config.unwrap_or_default(),
@@ -141,7 +137,6 @@ impl Highlighter for LimboHelper {
pub struct SqlCompleter {
conn: Arc,
- io: Arc,
// Has to be a ref cell as Rustyline takes immutable reference to self
// This problem would be solved with Reedline as it uses &mut self for completions
cmd: RefCell,
@@ -149,10 +144,9 @@ pub struct SqlCompleter {
}
impl SqlCompleter {
- pub fn new(conn: Arc, io: Arc) -> Self {
+ pub fn new(conn: Arc) -> Self {
Self {
conn,
- io,
cmd: C::command().into(),
_cmd_phantom: PhantomData,
}
@@ -228,7 +222,7 @@ impl SqlCompleter {
candidates.push(pair);
}
StepResult::IO => {
- try_result!(self.io.run_once(), (prefix_pos, candidates));
+ try_result!(rows.run_once(), (prefix_pos, candidates));
}
StepResult::Interrupt => break,
StepResult::Done => break,
diff --git a/core/benches/benchmark.rs b/core/benches/benchmark.rs
index 5318a33c2..5ff69cba1 100644
--- a/core/benches/benchmark.rs
+++ b/core/benches/benchmark.rs
@@ -1,7 +1,7 @@
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use pprof::criterion::{Output, PProfProfiler};
use std::sync::Arc;
-use turso_core::{Database, PlatformIO, IO};
+use turso_core::{Database, PlatformIO};
fn rusqlite_open() -> rusqlite::Connection {
let sqlite_conn = rusqlite::Connection::open("../testing/testing.db").unwrap();
@@ -79,7 +79,6 @@ fn bench_execute_select_rows(criterion: &mut Criterion) {
let mut stmt = limbo_conn
.prepare(format!("SELECT * FROM users LIMIT {}", *i))
.unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
@@ -87,7 +86,7 @@ fn bench_execute_select_rows(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -141,7 +140,6 @@ fn bench_execute_select_1(criterion: &mut Criterion) {
group.bench_function("limbo_execute_select_1", |b| {
let mut stmt = limbo_conn.prepare("SELECT 1").unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
@@ -149,7 +147,7 @@ fn bench_execute_select_1(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -194,7 +192,6 @@ fn bench_execute_select_count(criterion: &mut Criterion) {
group.bench_function("limbo_execute_select_count", |b| {
let mut stmt = limbo_conn.prepare("SELECT count() FROM users").unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
@@ -202,7 +199,7 @@ fn bench_execute_select_count(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
diff --git a/core/benches/json_benchmark.rs b/core/benches/json_benchmark.rs
index 3caa4e3bb..d458d60ea 100644
--- a/core/benches/json_benchmark.rs
+++ b/core/benches/json_benchmark.rs
@@ -4,7 +4,7 @@ use pprof::{
flamegraph::Options,
};
use std::sync::Arc;
-use turso_core::{Database, PlatformIO, IO};
+use turso_core::{Database, PlatformIO};
// Title: JSONB Function Benchmarking
@@ -447,13 +447,12 @@ fn bench(criterion: &mut Criterion) {
group.bench_function("Limbo", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
turso_core::StepResult::Row => {}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -606,13 +605,12 @@ fn bench_sequential_jsonb(criterion: &mut Criterion) {
group.bench_function("Limbo - Sequential", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
turso_core::StepResult::Row => {}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -899,13 +897,12 @@ fn bench_json_patch(criterion: &mut Criterion) {
group.bench_function("Limbo", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
turso_core::StepResult::Row => {}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
diff --git a/core/benches/tpc_h_benchmark.rs b/core/benches/tpc_h_benchmark.rs
index b976b5917..16bf857a5 100644
--- a/core/benches/tpc_h_benchmark.rs
+++ b/core/benches/tpc_h_benchmark.rs
@@ -2,7 +2,7 @@ use std::sync::Arc;
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, SamplingMode};
use pprof::criterion::{Output, PProfProfiler};
-use turso_core::{Database, PlatformIO, IO as _};
+use turso_core::{Database, PlatformIO};
const TPC_H_PATH: &str = "../perf/tpc-h/TPC-H.db";
@@ -97,7 +97,7 @@ fn bench_tpc_h_queries(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
diff --git a/core/ext/vtab_xconnect.rs b/core/ext/vtab_xconnect.rs
index 2a5993f38..6d29613c3 100644
--- a/core/ext/vtab_xconnect.rs
+++ b/core/ext/vtab_xconnect.rs
@@ -65,7 +65,10 @@ pub unsafe extern "C" fn execute(
return ResultCode::OK;
}
Ok(StepResult::IO) => {
- let _ = conn.pager.io.run_once();
+ let res = stmt.run_once();
+ if res.is_err() {
+ return ResultCode::Error;
+ }
continue;
}
Ok(StepResult::Interrupt) => return ResultCode::Interrupt,
@@ -154,7 +157,6 @@ pub unsafe extern "C" fn stmt_step(stmt: *mut Stmt) -> ResultCode {
tracing::error!("stmt_step: null connection or context");
return ResultCode::Error;
}
- let conn: &Connection = unsafe { &*(stmt._conn as *const Connection) };
let stmt_ctx: &mut Statement = unsafe { &mut *(stmt._ctx as *mut Statement) };
while let Ok(res) = stmt_ctx.step() {
match res {
@@ -162,7 +164,10 @@ pub unsafe extern "C" fn stmt_step(stmt: *mut Stmt) -> ResultCode {
StepResult::Done => return ResultCode::EOF,
StepResult::IO => {
// always handle IO step result internally.
- let _ = conn.pager.io.run_once();
+ let res = stmt_ctx.run_once();
+ if res.is_err() {
+ return ResultCode::Error;
+ }
continue;
}
StepResult::Interrupt => return ResultCode::Interrupt,
diff --git a/core/function.rs b/core/function.rs
index 03c731f86..3467dd738 100644
--- a/core/function.rs
+++ b/core/function.rs
@@ -619,7 +619,8 @@ impl Func {
}
}
pub fn resolve_function(name: &str, arg_count: usize) -> Result {
- match name {
+ let normalized_name = crate::util::normalize_ident(name);
+ match normalized_name.as_str() {
"avg" => {
if arg_count != 1 {
crate::bail_parse_error!("wrong number of arguments to function {}()", name)
diff --git a/core/io/unix.rs b/core/io/unix.rs
index 76dfe3c05..235df10d0 100644
--- a/core/io/unix.rs
+++ b/core/io/unix.rs
@@ -18,7 +18,7 @@ use std::{
io::{ErrorKind, Read, Seek, Write},
sync::Arc,
};
-use tracing::{debug, trace};
+use tracing::{debug, instrument, trace, Level};
struct OwnedCallbacks(UnsafeCell);
// We assume we locking on IO level is done by user.
@@ -219,6 +219,7 @@ impl IO for UnixIO {
Ok(unix_file)
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn run_once(&self) -> Result<()> {
if self.callbacks.is_empty() {
return Ok(());
@@ -333,6 +334,7 @@ impl File for UnixFile<'_> {
Ok(())
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn pread(&self, pos: usize, c: Completion) -> Result> {
let file = self.file.borrow();
let result = {
@@ -366,6 +368,7 @@ impl File for UnixFile<'_> {
}
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn pwrite(
&self,
pos: usize,
@@ -401,6 +404,7 @@ impl File for UnixFile<'_> {
}
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn sync(&self, c: Completion) -> Result> {
let file = self.file.borrow();
let result = fs::fsync(file.as_fd());
@@ -415,6 +419,7 @@ impl File for UnixFile<'_> {
}
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn size(&self) -> Result {
let file = self.file.borrow();
Ok(file.metadata()?.len())
diff --git a/core/lib.rs b/core/lib.rs
index 4067aac15..f5cb80b57 100644
--- a/core/lib.rs
+++ b/core/lib.rs
@@ -43,6 +43,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
use crate::storage::{header_accessor, wal::DummyWAL};
use crate::translate::optimizer::optimize_plan;
+use crate::translate::pragma::TURSO_CDC_DEFAULT_TABLE_NAME;
use crate::util::{OpenMode, OpenOptions};
use crate::vtab::VirtualTable;
use core::str;
@@ -97,7 +98,7 @@ pub type Result = std::result::Result;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TransactionState {
- Write { change_schema: bool },
+ Write { schema_did_change: bool },
Read,
None,
}
@@ -217,7 +218,7 @@ impl Database {
if is_empty == 2 {
// parse schema
let conn = db.connect()?;
- let schema_version = get_schema_version(&conn, &io)?;
+ let schema_version = get_schema_version(&conn)?;
schema.write().schema_version = schema_version;
let rows = conn.query("SELECT * FROM sqlite_schema")?;
let mut schema = schema
@@ -225,7 +226,7 @@ impl Database {
.expect("lock on schema should succeed first try");
let syms = conn.syms.borrow();
if let Err(LimboError::ExtensionError(e)) =
- parse_schema_rows(rows, &mut schema, io, &syms, None)
+ parse_schema_rows(rows, &mut schema, &syms, None)
{
// this means that a vtab exists and we no longer have the module loaded. we print
// a warning to the user to load the module
@@ -278,6 +279,8 @@ impl Database {
cache_size: Cell::new(default_cache_size),
readonly: Cell::new(false),
wal_checkpoint_disabled: Cell::new(false),
+ capture_data_changes: RefCell::new(CaptureDataChangesMode::Off),
+ closed: Cell::new(false),
});
if let Err(e) = conn.register_builtins() {
return Err(LimboError::ExtensionError(e));
@@ -330,6 +333,8 @@ impl Database {
cache_size: Cell::new(default_cache_size),
readonly: Cell::new(false),
wal_checkpoint_disabled: Cell::new(false),
+ capture_data_changes: RefCell::new(CaptureDataChangesMode::Off),
+ closed: Cell::new(false),
});
if let Err(e) = conn.register_builtins() {
@@ -390,7 +395,7 @@ impl Database {
}
}
-fn get_schema_version(conn: &Arc, io: &Arc) -> Result {
+fn get_schema_version(conn: &Arc) -> Result {
let mut rows = conn
.query("PRAGMA schema_version")?
.ok_or(LimboError::InternalError(
@@ -409,7 +414,7 @@ fn get_schema_version(conn: &Arc, io: &Arc) -> Result {
schema_version = Some(row.get::(0)? as u32);
}
StepResult::IO => {
- io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => {
return Err(LimboError::InternalError(
@@ -434,6 +439,39 @@ fn get_schema_version(conn: &Arc, io: &Arc) -> Result {
}
}
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum CaptureDataChangesMode {
+ Off,
+ RowidOnly { table: String },
+}
+
+impl CaptureDataChangesMode {
+ pub fn parse(value: &str) -> Result {
+ let (mode, table) = value
+ .split_once(",")
+ .unwrap_or((value, TURSO_CDC_DEFAULT_TABLE_NAME));
+ match mode {
+ "off" => Ok(CaptureDataChangesMode::Off),
+ "rowid-only" => Ok(CaptureDataChangesMode::RowidOnly { table: table.to_string() }),
+ _ => Err(LimboError::InvalidArgument(
+ "unexpected pragma value: expected '' or ',' parameter where mode is one of off|rowid-only".to_string(),
+ ))
+ }
+ }
+ pub fn mode_name(&self) -> &str {
+ match self {
+ CaptureDataChangesMode::Off => "off",
+ CaptureDataChangesMode::RowidOnly { .. } => "rowid-only",
+ }
+ }
+ pub fn table(&self) -> Option<&str> {
+ match self {
+ CaptureDataChangesMode::Off => None,
+ CaptureDataChangesMode::RowidOnly { table } => Some(table.as_str()),
+ }
+ }
+}
+
pub struct Connection {
_db: Arc,
pager: Rc,
@@ -450,11 +488,16 @@ pub struct Connection {
cache_size: Cell,
readonly: Cell,
wal_checkpoint_disabled: Cell,
+ capture_data_changes: RefCell,
+ closed: Cell,
}
impl Connection {
- #[instrument(skip_all, level = Level::TRACE)]
+ #[instrument(skip_all, level = Level::INFO)]
pub fn prepare(self: &Arc, sql: impl AsRef) -> Result {
+ if self.closed.get() {
+ return Err(LimboError::InternalError("Connection closed".to_string()));
+ }
if sql.as_ref().is_empty() {
return Err(LimboError::InvalidArgument(
"The supplied SQL string contains no statements".to_string(),
@@ -494,8 +537,11 @@ impl Connection {
}
}
- #[instrument(skip_all, level = Level::TRACE)]
+ #[instrument(skip_all, level = Level::INFO)]
pub fn query(self: &Arc, sql: impl AsRef) -> Result