diff --git a/.github/workflows/antithesis.yml b/.github/workflows/antithesis.yml
index f0b417632..bcd767447 100644
--- a/.github/workflows/antithesis.yml
+++ b/.github/workflows/antithesis.yml
@@ -13,7 +13,7 @@ env:
ANTITHESIS_PASSWD: ${{ secrets.ANTITHESIS_PASSWD }}
ANTITHESIS_DOCKER_HOST: us-central1-docker.pkg.dev
ANTITHESIS_DOCKER_REPO: ${{ secrets.ANTITHESIS_DOCKER_REPO }}
- ANTITHESIS_EMAIL: "penberg@turso.tech;pmuniz@turso.tech"
+ ANTITHESIS_EMAIL: ${{ secrets.ANTITHESIS_EMAIL }}
ANTITHESIS_REGISTRY_KEY: ${{ secrets.ANTITHESIS_REGISTRY_KEY }}
jobs:
diff --git a/.github/workflows/long_fuzz_tests_btree.yml b/.github/workflows/long_fuzz_tests_btree.yml
index 49e5a252e..0f38f67bf 100644
--- a/.github/workflows/long_fuzz_tests_btree.yml
+++ b/.github/workflows/long_fuzz_tests_btree.yml
@@ -28,10 +28,6 @@ jobs:
run: cargo test -- --ignored fuzz_long
env:
RUST_BACKTRACE: 1
- - name: Run ignored long tests with index
- run: cargo test -- --ignored fuzz_long
- env:
- RUST_BACKTRACE: 1
simple-stress-test:
runs-on: blacksmith-4vcpu-ubuntu-2404
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 65e19f332..fa7cd6b0d 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -73,19 +73,12 @@ jobs:
with:
prefix-key: "v1-rust" # can be updated if we need to reset caches due to non-trivial change in the dependencies (for example, custom env var were set for single workspace project)
- name: Install the project
- run: ./scripts/run-sim --iterations 50
+ run: ./scripts/run-sim --maximum-tests 2000 loop -n 50 -s
test-limbo:
runs-on: blacksmith-4vcpu-ubuntu-2404
timeout-minutes: 20
steps:
- - name: Install cargo-c
- env:
- LINK: https://github.com/lu-zero/cargo-c/releases/download/v0.10.7
- CARGO_C_FILE: cargo-c-x86_64-unknown-linux-musl.tar.gz
- run: |
- curl -L $LINK/$CARGO_C_FILE | tar xz -C ~/.cargo/bin
-
- uses: actions/checkout@v3
- name: Install uv
@@ -96,9 +89,6 @@ jobs:
- name: Set up Python
run: uv python install
- - name: Install the project
- run: uv sync --all-extras --dev --all-packages
-
- uses: "./.github/shared/install_sqlite"
- name: Test
run: make test
diff --git a/.github/workflows/rust_perf.yml b/.github/workflows/rust_perf.yml
index 9cf0cd2f1..3ee8527d2 100644
--- a/.github/workflows/rust_perf.yml
+++ b/.github/workflows/rust_perf.yml
@@ -88,7 +88,7 @@ jobs:
nyrkio-public: true
- name: Analyze SQLITE3 result with Nyrkiö
- uses: nyrkio/github-action-benchmark@HEAD
+ uses: nyrkio/change-detection@HEAD
with:
name: clickbench/sqlite3
tool: time
diff --git a/Cargo.lock b/Cargo.lock
index cb079c09c..714b14404 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -571,7 +571,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "core_tester"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anyhow",
"assert_cmd",
@@ -1879,14 +1879,14 @@ dependencies = [
[[package]]
name = "limbo-go"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"turso_core",
]
[[package]]
name = "limbo-wasm"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"console_error_panic_hook",
"getrandom 0.2.15",
@@ -1899,7 +1899,7 @@ dependencies = [
[[package]]
name = "limbo_completion"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"mimalloc",
"turso_ext",
@@ -1907,7 +1907,7 @@ dependencies = [
[[package]]
name = "limbo_crypto"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"blake3",
"data-encoding",
@@ -1920,7 +1920,7 @@ dependencies = [
[[package]]
name = "limbo_csv"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"csv",
"mimalloc",
@@ -1930,7 +1930,7 @@ dependencies = [
[[package]]
name = "limbo_ipaddr"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"ipnetwork",
"mimalloc",
@@ -1939,7 +1939,7 @@ dependencies = [
[[package]]
name = "limbo_percentile"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"mimalloc",
"turso_ext",
@@ -1947,7 +1947,7 @@ dependencies = [
[[package]]
name = "limbo_regexp"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"mimalloc",
"regex",
@@ -1956,7 +1956,7 @@ dependencies = [
[[package]]
name = "limbo_sim"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anarchist-readable-name-generator-lib",
"anyhow",
@@ -1983,7 +1983,7 @@ dependencies = [
[[package]]
name = "limbo_sqlite3"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"env_logger 0.11.7",
"libc",
@@ -1996,7 +1996,7 @@ dependencies = [
[[package]]
name = "limbo_sqlite_test_ext"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"cc",
]
@@ -2232,6 +2232,8 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
+ "regex",
+ "semver",
"syn 2.0.100",
]
@@ -2474,45 +2476,6 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
-[[package]]
-name = "phf"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
-dependencies = [
- "phf_shared",
-]
-
-[[package]]
-name = "phf_codegen"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
-dependencies = [
- "phf_generator",
- "phf_shared",
-]
-
-[[package]]
-name = "phf_generator"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
-dependencies = [
- "phf_shared",
- "rand 0.8.5",
-]
-
-[[package]]
-name = "phf_shared"
-version = "0.11.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
-dependencies = [
- "siphasher",
- "uncased",
-]
-
[[package]]
name = "pin-project-lite"
version = "0.2.16"
@@ -2700,7 +2663,7 @@ dependencies = [
[[package]]
name = "py-turso"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anyhow",
"pyo3",
@@ -3295,12 +3258,6 @@ dependencies = [
"libc",
]
-[[package]]
-name = "siphasher"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
-
[[package]]
name = "slab"
version = "0.4.9"
@@ -3312,9 +3269,9 @@ dependencies = [
[[package]]
name = "smallvec"
-version = "1.14.0"
+version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
[[package]]
name = "socket2"
@@ -3811,7 +3768,7 @@ dependencies = [
[[package]]
name = "turso"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"tempfile",
"thiserror 2.0.12",
@@ -3821,7 +3778,7 @@ dependencies = [
[[package]]
name = "turso-java"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"jni",
"thiserror 2.0.12",
@@ -3830,7 +3787,7 @@ dependencies = [
[[package]]
name = "turso_cli"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anyhow",
"cfg-if",
@@ -3861,7 +3818,7 @@ dependencies = [
[[package]]
name = "turso_core"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"antithesis_sdk",
"bitflags 2.9.0",
@@ -3914,7 +3871,7 @@ dependencies = [
[[package]]
name = "turso_dart"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"flutter_rust_bridge",
"turso_core",
@@ -3922,7 +3879,7 @@ dependencies = [
[[package]]
name = "turso_ext"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"chrono",
"getrandom 0.3.2",
@@ -3931,7 +3888,7 @@ dependencies = [
[[package]]
name = "turso_ext_tests"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"env_logger 0.11.7",
"lazy_static",
@@ -3942,7 +3899,7 @@ dependencies = [
[[package]]
name = "turso_macros"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"proc-macro2",
"quote",
@@ -3951,7 +3908,7 @@ dependencies = [
[[package]]
name = "turso_node"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"napi",
"napi-build",
@@ -3961,7 +3918,7 @@ dependencies = [
[[package]]
name = "turso_sqlite3_parser"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"bitflags 2.9.0",
"cc",
@@ -3971,18 +3928,15 @@ dependencies = [
"log",
"memchr",
"miette",
- "phf",
- "phf_codegen",
- "phf_shared",
"serde",
+ "smallvec",
"strum",
"strum_macros",
- "uncased",
]
[[package]]
name = "turso_stress"
-version = "0.1.1"
+version = "0.1.2-pre.2"
dependencies = [
"anarchist-readable-name-generator-lib",
"antithesis_sdk",
diff --git a/Cargo.toml b/Cargo.toml
index 00fb19f0c..cca1e3091 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,25 +31,25 @@ members = [
exclude = ["perf/latency/limbo"]
[workspace.package]
-version = "0.1.1"
+version = "0.1.2-pre.2"
authors = ["the Limbo authors"]
edition = "2021"
license = "MIT"
repository = "https://github.com/tursodatabase/turso"
[workspace.dependencies]
-limbo_completion = { path = "extensions/completion", version = "0.1.1" }
-turso_core = { path = "core", version = "0.1.1" }
-limbo_crypto = { path = "extensions/crypto", version = "0.1.1" }
-limbo_csv = { path = "extensions/csv", version = "0.1.1" }
-turso_ext = { path = "extensions/core", version = "0.1.1" }
-turso_ext_tests = { path = "extensions/tests", version = "0.1.1" }
-limbo_ipaddr = { path = "extensions/ipaddr", version = "0.1.1" }
-turso_macros = { path = "macros", version = "0.1.1" }
-limbo_percentile = { path = "extensions/percentile", version = "0.1.1" }
-limbo_regexp = { path = "extensions/regexp", version = "0.1.1" }
-turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.1.1" }
-limbo_uuid = { path = "extensions/uuid", version = "0.1.1" }
+limbo_completion = { path = "extensions/completion", version = "0.1.2-pre.2" }
+turso_core = { path = "core", version = "0.1.2-pre.2" }
+limbo_crypto = { path = "extensions/crypto", version = "0.1.2-pre.2" }
+limbo_csv = { path = "extensions/csv", version = "0.1.2-pre.2" }
+turso_ext = { path = "extensions/core", version = "0.1.2-pre.2" }
+turso_ext_tests = { path = "extensions/tests", version = "0.1.2-pre.2" }
+limbo_ipaddr = { path = "extensions/ipaddr", version = "0.1.2-pre.2" }
+turso_macros = { path = "macros", version = "0.1.2-pre.2" }
+limbo_percentile = { path = "extensions/percentile", version = "0.1.2-pre.2" }
+limbo_regexp = { path = "extensions/regexp", version = "0.1.2-pre.2" }
+turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.1.2-pre.2" }
+limbo_uuid = { path = "extensions/uuid", version = "0.1.2-pre.2" }
strum = { version = "0.26", features = ["derive"] }
strum_macros = "0.26"
serde = "1.0"
diff --git a/Dockerfile.antithesis b/Dockerfile.antithesis
index f8f8e229a..46ea80a4d 100644
--- a/Dockerfile.antithesis
+++ b/Dockerfile.antithesis
@@ -12,6 +12,7 @@ WORKDIR /app
FROM chef AS planner
COPY ./Cargo.lock ./Cargo.lock
COPY ./Cargo.toml ./Cargo.toml
+COPY ./bindings/dart ./bindings/dart/
COPY ./bindings/go ./bindings/go/
COPY ./bindings/java ./bindings/java/
COPY ./bindings/javascript ./bindings/javascript/
@@ -56,6 +57,7 @@ COPY --from=planner /app/sqlite3 ./sqlite3/
COPY --from=planner /app/tests ./tests/
COPY --from=planner /app/stress ./stress/
COPY --from=planner /app/bindings/rust ./bindings/rust/
+COPY --from=planner /app/bindings/dart ./bindings/dart/
COPY --from=planner /app/bindings/go ./bindings/go/
COPY --from=planner /app/bindings/javascript ./bindings/javascript/
COPY --from=planner /app/bindings/java ./bindings/java/
@@ -84,7 +86,7 @@ RUN maturin build
#
FROM debian:bullseye-slim AS runtime
-RUN apt-get update && apt-get install -y bash curl xz-utils python3 sqlite3 bc binutils pip && rm -rf /var/lib/apt/lists/*
+RUN apt-get update && apt-get install -y bash curl xz-utils python3 procps sqlite3 bc binutils pip && rm -rf /var/lib/apt/lists/*
RUN pip install antithesis
WORKDIR /app
diff --git a/Makefile b/Makefile
index 51dbdc052..499007884 100644
--- a/Makefile
+++ b/Makefile
@@ -3,9 +3,11 @@ CURRENT_RUST_VERSION := $(shell rustc -V | sed -E 's/rustc ([0-9]+\.[0-9]+\.[0-9
CURRENT_RUST_TARGET := $(shell rustc -vV | grep host | cut -d ' ' -f 2)
RUSTUP := $(shell command -v rustup 2> /dev/null)
UNAME_S := $(shell uname -s)
+MINIMUM_TCL_VERSION := 8.6
# Executable used to execute the compatibility tests.
SQLITE_EXEC ?= scripts/limbo-sqlite3
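+# Default RUST_LOG level forwarded to the test recipes below; keeps Rust logging off during tests unless overridden.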
+RUST_LOG := off
all: check-rust-version check-wasm-target limbo limbo-wasm
.PHONY: all
@@ -26,6 +28,17 @@ check-rust-version:
fi
.PHONY: check-rust-version
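+# Fail fast when the installed tclsh is older than MINIMUM_TCL_VERSION; the TCL-based compat tests (test-compat) require it.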
+check-tcl-version:
+ @printf '%s\n' \
+ 'set need "$(MINIMUM_TCL_VERSION)"' \
+ 'set have [info patchlevel]' \
+ 'if {[package vcompare $$have $$need] < 0} {' \
+ ' puts stderr "tclsh $$have found — need $$need+"' \
+ ' exit 1' \
+ '}' \
+ | tclsh
+.PHONY: check-tcl-version
+
check-wasm-target:
@echo "Checking wasm32-wasi target..."
@if ! rustup target list | grep -q "wasm32-wasi (installed)"; then \
@@ -51,27 +64,31 @@ uv-sync:
uv sync --all-packages
.PHONE: uv-sync
-test: limbo uv-sync test-compat test-vector test-sqlite3 test-shell test-extensions test-memory test-write test-update test-constraint test-collate
+uv-sync-test:
+ uv sync --all-extras --dev --package turso_test
+.PHONY: uv-sync-test
+
+test: limbo uv-sync-test test-compat test-vector test-sqlite3 test-shell test-memory test-write test-update test-constraint test-collate test-extensions
.PHONY: test
-test-extensions: limbo uv-sync
- uv run --project limbo_test test-extensions
+test-extensions: limbo uv-sync-test
+ RUST_LOG=$(RUST_LOG) uv run --project limbo_test test-extensions
.PHONY: test-extensions
-test-shell: limbo uv-sync
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-shell
+test-shell: limbo uv-sync-test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-shell
.PHONY: test-shell
-test-compat:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/all.test
+test-compat: check-tcl-version
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/all.test
.PHONY: test-compat
test-vector:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/vector.test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/vector.test
.PHONY: test-vector
test-time:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/time.test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/time.test
.PHONY: test-time
reset-db:
@@ -85,48 +102,48 @@ test-sqlite3: reset-db
.PHONY: test-sqlite3
test-json:
- SQLITE_EXEC=$(SQLITE_EXEC) ./testing/json.test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/json.test
.PHONY: test-json
-test-memory: limbo uv-sync
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-memory
+test-memory: limbo uv-sync-test
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-memory
.PHONY: test-memory
-test-write: limbo uv-sync
+test-write: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-write; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-write; \
else \
echo "Skipping test-write: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-write
-test-update: limbo uv-sync
+test-update: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-update; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-update; \
else \
echo "Skipping test-update: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-update
-test-collate: limbo uv-sync
+test-collate: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-collate; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-collate; \
else \
echo "Skipping test-collate: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-collate
-test-constraint: limbo uv-sync
+test-constraint: limbo uv-sync-test
@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
- SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-constraint; \
+ RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-constraint; \
else \
echo "Skipping test-constraint: SQLITE_EXEC does not have indexes scripts/limbo-sqlite3"; \
fi
.PHONY: test-constraint
-bench-vfs: uv-sync
+bench-vfs: uv-sync-test
cargo build --release
- uv run --project limbo_test bench-vfs "$(SQL)" "$(N)"
+ RUST_LOG=$(RUST_LOG) uv run --project limbo_test bench-vfs "$(SQL)" "$(N)"
clickbench:
./perf/clickbench/benchmark.sh
diff --git a/README.md b/README.md
index 2e9be93a6..98a13f674 100644
--- a/README.md
+++ b/README.md
@@ -64,7 +64,7 @@ Turso
Enter ".help" for usage hints.
Connected to a transient in-memory database.
Use ".open FILENAME" to reopen on a persistent database
-turso> CREATE TABLE users (id INT PRIMARY KEY, username TEXT);
+turso> CREATE TABLE users (id INT, username TEXT);
turso> INSERT INTO users VALUES (1, 'alice');
turso> INSERT INTO users VALUES (2, 'bob');
turso> SELECT * FROM users;
@@ -224,9 +224,11 @@ terms or conditions.
Thanks to all the partners of Turso!
+
+
-
+
## Contributors
diff --git a/antithesis-tests/bank-test/first_setup.py b/antithesis-tests/bank-test/first_setup.py
index fcbc33530..64ad06457 100755
--- a/antithesis-tests/bank-test/first_setup.py
+++ b/antithesis-tests/bank-test/first_setup.py
@@ -50,3 +50,5 @@ cur.execute(f"""
INSERT INTO initial_state (num_accts, total)
VALUES ({num_accts}, {total})
""")
+
+con.commit()
diff --git a/antithesis-tests/stress-composer/first_setup.py b/antithesis-tests/stress-composer/first_setup.py
index 9d755a071..45b37466f 100755
--- a/antithesis-tests/stress-composer/first_setup.py
+++ b/antithesis-tests/stress-composer/first_setup.py
@@ -83,4 +83,6 @@ for i in range(tbl_count):
CREATE TABLE tbl_{i} ({cols_str})
""")
+con.commit()
+
print(f"DB Schemas\n------------\n{json.dumps(schemas, indent=2)}")
diff --git a/antithesis-tests/stress-composer/parallel_driver_delete.py b/antithesis-tests/stress-composer/parallel_driver_delete.py
index 4ec62079b..d2e719fec 100755
--- a/antithesis-tests/stress-composer/parallel_driver_delete.py
+++ b/antithesis-tests/stress-composer/parallel_driver_delete.py
@@ -37,6 +37,13 @@ print(f"Attempt to delete {deletions} rows in tbl_{selected_tbl}...")
for i in range(deletions):
where_clause = f"col_{pk} = {generate_random_value(tbl_schema[f'col_{pk}']['data_type'])}"
- cur.execute(f"""
- DELETE FROM tbl_{selected_tbl} WHERE {where_clause}
- """)
+ try:
+ cur.execute(f"""
+ DELETE FROM tbl_{selected_tbl} WHERE {where_clause}
+ """)
+ except turso.OperationalError:
+ con.rollback()
+        # Roll back the failed transaction and re-raise the error
+ raise
+
+con.commit()
diff --git a/antithesis-tests/stress-composer/parallel_driver_insert.py b/antithesis-tests/stress-composer/parallel_driver_insert.py
index 8e4f73e1f..bb5a02170 100755
--- a/antithesis-tests/stress-composer/parallel_driver_insert.py
+++ b/antithesis-tests/stress-composer/parallel_driver_insert.py
@@ -44,5 +44,8 @@ for i in range(insertions):
# Ignore UNIQUE constraint violations
pass
else:
+ con.rollback()
# Re-raise other operational errors
raise
+
+con.commit()
diff --git a/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py b/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py
index d101fcfc5..594925797 100755
--- a/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py
+++ b/antithesis-tests/stress-composer/parallel_driver_schema_rollback.py
@@ -17,8 +17,7 @@ cur_init = con_init.cursor()
tbl_len = cur_init.execute("SELECT count FROM tables").fetchone()[0]
selected_tbl = get_random() % tbl_len
-tbl_schema = json.loads(cur_init.execute(
- f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
+tbl_schema = json.loads(cur_init.execute(f"SELECT schema FROM schemas WHERE tbl = {selected_tbl}").fetchone()[0])
tbl_name = f"tbl_{selected_tbl}"
@@ -29,8 +28,7 @@ except Exception as e:
exit(0)
cur = con.cursor()
-cur.execute(
- "SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
+cur.execute("SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
result = cur.fetchone()
@@ -47,10 +45,8 @@ cur.execute("ALTER TABLE " + tbl_name + " RENAME TO " + tbl_name + "_old")
con.rollback()
cur = con.cursor()
-cur.execute(
- "SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
+cur.execute("SELECT sql FROM sqlite_schema WHERE type = 'table' AND name = '" + tbl_name + "'")
schema_after = cur.fetchone()[0]
-always(schema_before == schema_after,
- "schema should be the same after rollback", {})
+always(schema_before == schema_after, "schema should be the same after rollback", {})
diff --git a/antithesis-tests/stress-composer/parallel_driver_update.py b/antithesis-tests/stress-composer/parallel_driver_update.py
index e30d53acd..101508cc2 100755
--- a/antithesis-tests/stress-composer/parallel_driver_update.py
+++ b/antithesis-tests/stress-composer/parallel_driver_update.py
@@ -58,5 +58,8 @@ for i in range(updates):
# Ignore UNIQUE constraint violations
pass
else:
+ con.rollback()
# Re-raise other operational errors
raise
+
+con.commit()
diff --git a/assets/turso-nyrkio.png b/assets/turso-nyrkio.png
new file mode 100644
index 000000000..e65fcd7f5
Binary files /dev/null and b/assets/turso-nyrkio.png differ
diff --git a/bindings/go/rs_src/rows.rs b/bindings/go/rs_src/rows.rs
index 0e7e1bfbc..98739e83a 100644
--- a/bindings/go/rs_src/rows.rs
+++ b/bindings/go/rs_src/rows.rs
@@ -7,7 +7,7 @@ use turso_core::{LimboError, Statement, StepResult, Value};
pub struct LimboRows<'conn> {
    stmt: Box<Statement>,
- conn: &'conn mut LimboConn,
+ _conn: &'conn mut LimboConn,
    err: Option<LimboError>,
}
@@ -15,7 +15,7 @@ impl<'conn> LimboRows<'conn> {
pub fn new(stmt: Statement, conn: &'conn mut LimboConn) -> Self {
LimboRows {
stmt: Box::new(stmt),
- conn,
+ _conn: conn,
err: None,
}
}
@@ -55,8 +55,12 @@ pub extern "C" fn rows_next(ctx: *mut c_void) -> ResultCode {
Ok(StepResult::Row) => ResultCode::Row,
Ok(StepResult::Done) => ResultCode::Done,
Ok(StepResult::IO) => {
- let _ = ctx.conn.io.run_once();
- ResultCode::Io
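+            // Run the statement's pending I/O directly and surface run_once() failures instead of ignoring them.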
+ let res = ctx.stmt.run_once();
+ if res.is_err() {
+ ResultCode::Error
+ } else {
+ ResultCode::Io
+ }
}
Ok(StepResult::Busy) => ResultCode::Busy,
Ok(StepResult::Interrupt) => ResultCode::Interrupt,
diff --git a/bindings/go/rs_src/statement.rs b/bindings/go/rs_src/statement.rs
index 970ecd7cf..e1b5ae26b 100644
--- a/bindings/go/rs_src/statement.rs
+++ b/bindings/go/rs_src/statement.rs
@@ -64,7 +64,10 @@ pub extern "C" fn stmt_execute(
return ResultCode::Done;
}
Ok(StepResult::IO) => {
- let _ = stmt.conn.io.run_once();
+ let res = statement.run_once();
+ if res.is_err() {
+ return ResultCode::Error;
+ }
}
Ok(StepResult::Busy) => {
return ResultCode::Busy;
diff --git a/bindings/java/rs_src/turso_connection.rs b/bindings/java/rs_src/turso_connection.rs
index 1d2ae9f10..8a55bf169 100644
--- a/bindings/java/rs_src/turso_connection.rs
+++ b/bindings/java/rs_src/turso_connection.rs
@@ -13,12 +13,12 @@ use turso_core::Connection;
#[derive(Clone)]
pub struct TursoConnection {
    pub(crate) conn: Arc<Connection>,
- pub(crate) io: Arc,
+ pub(crate) _io: Arc,
}
impl TursoConnection {
    pub fn new(conn: Arc<Connection>, io: Arc) -> Self {
- TursoConnection { conn, io }
+ TursoConnection { conn, _io: io }
}
#[allow(clippy::wrong_self_convention)]
diff --git a/bindings/java/rs_src/turso_statement.rs b/bindings/java/rs_src/turso_statement.rs
index 17eaa5a5b..444d34707 100644
--- a/bindings/java/rs_src/turso_statement.rs
+++ b/bindings/java/rs_src/turso_statement.rs
@@ -76,7 +76,7 @@ pub extern "system" fn Java_tech_turso_core_TursoStatement_step<'local>(
};
}
StepResult::IO => {
- if let Err(e) = stmt.connection.io.run_once() {
+ if let Err(e) = stmt.stmt.run_once() {
set_err_msg_and_throw_exception(&mut env, obj, TURSO_ETC, e.to_string());
return to_turso_step_result(&mut env, STEP_RESULT_ID_ERROR, None);
}
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java
index 88c76dd85..6841a5cbc 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Connection.java
@@ -24,7 +24,9 @@ public final class JDBC4Connection implements Connection {
}
public TursoStatement prepare(String sql) throws SQLException {
- return connection.prepare(sql);
+ final TursoStatement statement = connection.prepare(sql);
+ statement.initializeColumnMetadata();
+ return statement;
}
@Override
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java
index e947aa272..a3f8b3d4d 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4PreparedStatement.java
@@ -34,7 +34,6 @@ public final class JDBC4PreparedStatement extends JDBC4Statement implements Prep
super(connection);
this.sql = sql;
this.statement = connection.prepare(sql);
- this.statement.initializeColumnMetadata();
this.resultSet = new JDBC4ResultSet(this.statement.getResultSet());
}
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java
index 23421bc51..85dee794d 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4ResultSet.java
@@ -319,10 +319,8 @@ public final class JDBC4ResultSet implements ResultSet, ResultSetMetaData {
}
@Override
- @SkipNullableCheck
public Object getObject(int columnIndex) throws SQLException {
- // TODO
- return null;
+ return resultSet.get(columnIndex);
}
@Override
@@ -1226,20 +1224,22 @@ public final class JDBC4ResultSet implements ResultSet, ResultSetMetaData {
@Override
public int getColumnDisplaySize(int column) throws SQLException {
- // TODO
- return 0;
+ return Integer.MAX_VALUE;
}
@Override
public String getColumnLabel(int column) throws SQLException {
- // TODO
- return "";
+ // TODO: should consider "AS" keyword
+ return getColumnName(column);
}
@Override
public String getColumnName(int column) throws SQLException {
- // TODO
- return "";
+ if (column > 0 && column <= resultSet.getColumnNames().length) {
+ return resultSet.getColumnNames()[column - 1];
+ }
+
+ throw new SQLException("Index out of bound: " + column);
}
@Override
diff --git a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java
index 02831dbdd..b86b838f5 100644
--- a/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java
+++ b/bindings/java/src/main/java/tech/turso/jdbc4/JDBC4Statement.java
@@ -18,6 +18,7 @@ public class JDBC4Statement implements Statement {
private final JDBC4Connection connection;
@Nullable protected TursoStatement statement = null;
+ protected long updateCount;
// Because JDBC4Statement has different life cycle in compared to tursoStatement, let's use this
// field to manage JDBC4Statement lifecycle
@@ -173,8 +174,10 @@ public class JDBC4Statement implements Statement {
// TODO: if sql is a readOnly query, do we still need the locks?
connectionLock.lock();
statement = connection.prepare(sql);
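+      // Snapshot totalChanges() before execute() so getUpdateCount() can report the rows affected by this statement.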
+ final long previousChanges = statement.totalChanges();
final boolean result = statement.execute();
updateGeneratedKeys();
+ updateCount = statement.totalChanges() - previousChanges;
return result;
} finally {
@@ -186,19 +189,13 @@ public class JDBC4Statement implements Statement {
@Override
public ResultSet getResultSet() throws SQLException {
requireNonNull(statement, "statement is null");
+ ensureOpen();
return new JDBC4ResultSet(statement.getResultSet());
}
@Override
public int getUpdateCount() throws SQLException {
- // TODO
- return 0;
- }
-
- @Override
- public boolean getMoreResults() throws SQLException {
- // TODO
- return false;
+ return (int) updateCount;
}
@Override
@@ -254,9 +251,22 @@ public class JDBC4Statement implements Statement {
return connection;
}
+ @Override
+ public boolean getMoreResults() throws SQLException {
+ return getMoreResults(Statement.CLOSE_CURRENT_RESULT);
+ }
+
@Override
public boolean getMoreResults(int current) throws SQLException {
- // TODO
+ requireNonNull(statement, "statement should not be null");
+
+ if (current != Statement.CLOSE_CURRENT_RESULT) {
+ throw new SQLException("Invalid argument");
+ }
+
+ statement.getResultSet().close();
+ updateCount = -1;
+
return false;
}
diff --git a/bindings/java/src/test/java/tech/turso/jdbc4/JDBC4StatementTest.java b/bindings/java/src/test/java/tech/turso/jdbc4/JDBC4StatementTest.java
index ce02eb2e8..e8266c76a 100644
--- a/bindings/java/src/test/java/tech/turso/jdbc4/JDBC4StatementTest.java
+++ b/bindings/java/src/test/java/tech/turso/jdbc4/JDBC4StatementTest.java
@@ -2,6 +2,7 @@ package tech.turso.jdbc4;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -55,6 +56,30 @@ class JDBC4StatementTest {
assertTrue(stmt.execute("SELECT * FROM users;"));
}
+ @Test
+ void execute_select() throws Exception {
+ stmt.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT);");
+ stmt.execute("INSERT INTO users VALUES (1, 'turso 1')");
+ stmt.execute("INSERT INTO users VALUES (2, 'turso 2')");
+ stmt.execute("INSERT INTO users VALUES (3, 'turso 3')");
+
+ ResultSet rs = stmt.executeQuery("SELECT * FROM users;");
+ rs.next();
+ int rowCount = 0;
+
+ do {
+ rowCount++;
+ int id = rs.getInt(1);
+ String username = rs.getString(2);
+
+ assertEquals(id, rowCount);
+ assertEquals(username, "turso " + rowCount);
+ } while (rs.next());
+
+ assertEquals(rowCount, 3);
+ assertFalse(rs.next());
+ }
+
@Test
void close_statement_test() throws Exception {
stmt.close();
diff --git a/bindings/javascript/Cargo.toml b/bindings/javascript/Cargo.toml
index f02a8abe9..7d62ede4c 100644
--- a/bindings/javascript/Cargo.toml
+++ b/bindings/javascript/Cargo.toml
@@ -13,7 +13,7 @@ crate-type = ["cdylib"]
[dependencies]
turso_core = { workspace = true }
napi = { version = "2.16.17", default-features = false, features = ["napi4"] }
-napi-derive = { version = "2.16.13", default-features = false }
+napi-derive = { version = "2.16.13", default-features = true }
[build-dependencies]
napi-build = "2.2.0"
diff --git a/bindings/javascript/__test__/better-sqlite3.spec.mjs b/bindings/javascript/__test__/better-sqlite3.spec.mjs
index b7ec7cf01..992cda585 100644
--- a/bindings/javascript/__test__/better-sqlite3.spec.mjs
+++ b/bindings/javascript/__test__/better-sqlite3.spec.mjs
@@ -1,44 +1,57 @@
-import test from "ava";
+import crypto from 'crypto';
import fs from "node:fs";
import { fileURLToPath } from "url";
import path from "node:path"
+import DualTest from "./dual-test.mjs";
-import Database from "better-sqlite3";
+const inMemoryTest = new DualTest(":memory:");
+const foobarTest = new DualTest("foobar.db");
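+// DualTest (dual-test.mjs) appears to run each case against both better-sqlite3 and Turso; onlySqlitePasses marks cases Turso does not pass yet.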
-test("Open in-memory database", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Open in-memory database", async (t) => {
+ const db = t.context.db;
t.is(db.memory, true);
});
-test("Property .name of in-memory database", async (t) => {
- let name = ":memory:";
- const db = new Database(name);
- t.is(db.name, name);
+inMemoryTest.both("Property .name of in-memory database", async (t) => {
+ const db = t.context.db;
+ t.is(db.name, t.context.path);
});
-test("Property .name of database", async (t) => {
- let name = "foobar.db";
- const db = new Database(name);
- t.is(db.name, name);
+foobarTest.both("Property .name of database", async (t) => {
+ const db = t.context.db;
+ t.is(db.name, t.context.path);
});
-test("Property .readonly of database if set", async (t) => {
- const db = new Database("foobar.db", { readonly: true });
- t.is(db.readonly, true);
-});
+new DualTest("foobar.db", { readonly: true })
+ .both("Property .readonly of database if set", async (t) => {
+ const db = t.context.db;
+ t.is(db.readonly, true);
+ });
-test("Property .readonly of database if not set", async (t) => {
- const db = new Database("foobar.db");
+const genDatabaseFilename = () => {
+ return `test-${crypto.randomBytes(8).toString('hex')}.db`;
+};
+
+new DualTest().both("opening a read-only database fails if the file doesn't exist", async (t) => {
+ t.throws(() => t.context.connect(genDatabaseFilename(), { readonly: true }),
+ {
+ any: true,
+ code: 'SQLITE_CANTOPEN',
+ });
+})
+
+foobarTest.both("Property .readonly of database if not set", async (t) => {
+ const db = t.context.db;
t.is(db.readonly, false);
});
-test("Property .open of database", async (t) => {
- const db = new Database("foobar.db");
+foobarTest.onlySqlitePasses("Property .open of database", async (t) => {
+ const db = t.context.db;
t.is(db.open, true);
});
-test("Statement.get() returns data", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Statement.get() returns data", async (t) => {
+ const db = t.context.db;
const stmt = db.prepare("SELECT 1");
const result = stmt.get();
t.is(result["1"], 1);
@@ -46,22 +59,24 @@ test("Statement.get() returns data", async (t) => {
t.is(result2["1"], 1);
});
-test("Statement.get() returns undefined when no data", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Statement.get() returns undefined when no data", async (t) => {
+ const db = t.context.db;
const stmt = db.prepare("SELECT 1 WHERE 1 = 2");
const result = stmt.get();
t.is(result, undefined);
});
-test("Statement.run() returns correct result object", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.onlySqlitePasses("Statement.run() returns correct result object", async (t) => {
+ // run() isn't 100% compatible with better-sqlite3
+ // it should return a result object, not a row object
+ const db = t.context.db;
db.prepare("CREATE TABLE users (name TEXT)").run();
const rows = db.prepare("INSERT INTO users (name) VALUES (?)").run("Alice");
t.deepEqual(rows, { changes: 1, lastInsertRowid: 1 });
});
-test("Statment.iterate() should correctly return an iterable object", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Statment.iterate() should correctly return an iterable object", async (t) => {
+ const db = t.context.db;
db.prepare(
"CREATE TABLE users (name TEXT, age INTEGER, nationality TEXT)",
).run();
@@ -83,31 +98,45 @@ test("Statment.iterate() should correctly return an iterable object", async (t)
}
});
-test("Empty prepared statement should throw", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Empty prepared statement should throw", async (t) => {
+ const db = t.context.db;
t.throws(
() => {
db.prepare("");
},
- { instanceOf: Error },
+ { any: true }
);
});
-test("Test pragma()", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.onlySqlitePasses("Empty prepared statement should throw the correct error", async (t) => {
+ // the previous test can be removed once this one passes in Turso
+ const db = t.context.db;
+ t.throws(
+ () => {
+ db.prepare("");
+ },
+ {
+ instanceOf: RangeError,
+ message: "The supplied SQL string contains no statements",
+ },
+ );
+});
+
+inMemoryTest.both("Test pragma()", async (t) => {
+ const db = t.context.db;
t.deepEqual(typeof db.pragma("cache_size")[0].cache_size, "number");
t.deepEqual(typeof db.pragma("cache_size", { simple: true }), "number");
});
-test("pragma query", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("pragma query", async (t) => {
+ const db = t.context.db;
let page_size = db.pragma("page_size");
let expectedValue = [{ page_size: 4096 }];
t.deepEqual(page_size, expectedValue);
});
-test("pragma table_list", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("pragma table_list", async (t) => {
+ const db = t.context.db;
let param = "sqlite_schema";
let actual = db.pragma(`table_info(${param})`);
let expectedValue = [
@@ -120,16 +149,16 @@ test("pragma table_list", async (t) => {
t.deepEqual(actual, expectedValue);
});
-test("simple pragma table_list", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("simple pragma table_list", async (t) => {
+ const db = t.context.db;
let param = "sqlite_schema";
let actual = db.pragma(`table_info(${param})`, { simple: true });
let expectedValue = 0;
t.deepEqual(actual, expectedValue);
});
-test("Statement shouldn't bind twice with bind()", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Statement shouldn't bind twice with bind()", async (t) => {
+ const db = t.context.db;
db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
let stmt = db.prepare("SELECT * FROM users WHERE name = ?").bind("Alice");
@@ -141,14 +170,17 @@ test("Statement shouldn't bind twice with bind()", async (t) => {
t.throws(
() => {
- db.bind("Bob");
+ stmt.bind("Bob");
+ },
+ {
+ instanceOf: TypeError,
+ message: 'The bind() method can only be invoked once per statement object',
},
- { instanceOf: Error },
);
});
-test("Test pluck(): Rows should only have the values of the first column", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Test pluck(): Rows should only have the values of the first column", async (t) => {
+ const db = t.context.db;
db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Bob", 24);
@@ -161,8 +193,8 @@ test("Test pluck(): Rows should only have the values of the first column", async
}
});
-test("Test raw(): Rows should be returned as arrays", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Test raw(): Rows should be returned as arrays", async (t) => {
+ const db = t.context.db;
db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Bob", 24);
@@ -194,7 +226,7 @@ test("Test raw(): Rows should be returned as arrays", async (t) => {
t.deepEqual(rows[1], ["Bob", 24]);
});
-test("Test expand(): Columns should be namespaced", async (t) => {
+inMemoryTest.onlySqlitePasses("Test expand(): Columns should be namespaced", async (t) => {
const expandedResults = [
{
users: {
@@ -235,7 +267,7 @@ test("Test expand(): Columns should be namespaced", async (t) => {
},
];
- const [db] = await connect(":memory:");
+ const db = t.context.db;
db.prepare("CREATE TABLE users (name TEXT, type TEXT)").run();
db.prepare("CREATE TABLE addresses (userName TEXT, street TEXT, type TEXT)")
.run();
@@ -270,8 +302,8 @@ test("Test expand(): Columns should be namespaced", async (t) => {
t.deepEqual(allRows, regularResults);
});
-test("Presentation modes should be mutually exclusive", async (t) => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Presentation modes should be mutually exclusive", async (t) => {
+ const db = t.context.db;
db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Bob", 24);
@@ -310,22 +342,31 @@ test("Presentation modes should be mutually exclusive", async (t) => {
t.truthy(name);
t.assert(typeof name === "string");
}
+});
+
+inMemoryTest.onlySqlitePasses("Presentation mode 'expand' should be mutually exclusive", async (t) => {
+ // this test can be appended to the previous one when 'expand' is implemented in Turso
+ const db = t.context.db;
+ db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
+ db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
+ db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Bob", 24);
+
+ let stmt = db.prepare("SELECT * FROM users").pluck().raw();
// test expand()
stmt = db.prepare("SELECT * FROM users").raw().pluck().expand();
- rows = stmt.all();
+ const rows = stmt.all();
t.true(Array.isArray(rows));
t.is(rows.length, 2);
t.deepEqual(rows[0], { users: { name: "Alice", age: 42 } });
t.deepEqual(rows[1], { users: { name: "Bob", age: 24 } });
-});
+})
-
-test("Test exec(): Should correctly load multiple statements from file", async (t) => {
+inMemoryTest.both("Test exec(): Should correctly load multiple statements from file", async (t) => {
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
- const [db] = await connect(":memory:");
+ const db = t.context.db;
const file = fs.readFileSync(path.resolve(__dirname, "./artifacts/basic-test.sql"), "utf8");
db.exec(file);
let rows = db.prepare("SELECT * FROM users").iterate();
@@ -335,20 +376,17 @@ test("Test exec(): Should correctly load multiple statements from file", async (
}
});
-test("Test Statement.database gets the database object", async t => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Test Statement.database gets the database object", async t => {
+ const db = t.context.db;
let stmt = db.prepare("SELECT 1");
t.is(stmt.database, db);
});
-test("Test Statement.source", async t => {
- const [db] = await connect(":memory:");
+inMemoryTest.both("Test Statement.source", async t => {
+ const db = t.context.db;
let sql = "CREATE TABLE t (id int)";
let stmt = db.prepare(sql);
t.is(stmt.source, sql);
});
-const connect = async (path) => {
- const db = new Database(path);
- return [db];
-};
+
diff --git a/bindings/javascript/__test__/limbo.spec.mjs b/bindings/javascript/__test__/limbo.spec.mjs
deleted file mode 100644
index d07a51b90..000000000
--- a/bindings/javascript/__test__/limbo.spec.mjs
+++ /dev/null
@@ -1,257 +0,0 @@
-import test from "ava";
-import fs from "node:fs";
-import { fileURLToPath } from "url";
-import path from "node:path";
-
-import Database from "../wrapper.js";
-
-test("Open in-memory database", async (t) => {
- const [db] = await connect(":memory:");
- t.is(db.memory, true);
-});
-
-test("Property .name of in-memory database", async (t) => {
- let name = ":memory:";
- const db = new Database(name);
- t.is(db.name, name);
-});
-
-test("Property .name of database", async (t) => {
- let name = "foobar.db";
- const db = new Database(name);
- t.is(db.name, name);
-});
-
-test("Statement.get() returns data", async (t) => {
- const [db] = await connect(":memory:");
- const stmt = db.prepare("SELECT 1");
- const result = stmt.get();
- t.is(result["1"], 1);
- const result2 = stmt.get();
- t.is(result2["1"], 1);
-});
-
-test("Statement.get() returns undefined when no data", async (t) => {
- const [db] = await connect(":memory:");
- const stmt = db.prepare("SELECT 1 WHERE 1 = 2");
- const result = stmt.get();
- t.is(result, undefined);
-});
-
-// run() isn't 100% compatible with better-sqlite3
-// it should return a result object, not a row object
-test("Statement.run() returns correct result object", async (t) => {
- const [db] = await connect(":memory:");
- db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
- let rows = db.prepare("SELECT * FROM users").all();
- t.deepEqual(rows, [{ name: "Alice", age: 42 }]);
-});
-
-test("Statment.iterate() should correctly return an iterable object", async (t) => {
- const [db] = await connect(":memory:");
- db.prepare(
- "CREATE TABLE users (name TEXT, age INTEGER, nationality TEXT)",
- ).run();
- db.prepare("INSERT INTO users (name, age, nationality) VALUES (?, ?, ?)").run(
- ["Alice", 42],
- "UK",
- );
- db.prepare("INSERT INTO users (name, age, nationality) VALUES (?, ?, ?)").run(
- "Bob",
- 24,
- "USA",
- );
-
- let rows = db.prepare("SELECT * FROM users").iterate();
- for (const row of rows) {
- t.truthy(row.name);
- t.truthy(row.nationality);
- t.true(typeof row.age === "number");
- }
-});
-
-test("Empty prepared statement should throw", async (t) => {
- const [db] = await connect(":memory:");
- t.throws(
- () => {
- db.prepare("");
- },
- { instanceOf: Error },
- );
-});
-
-test("Test pragma()", async (t) => {
- const [db] = await connect(":memory:");
- t.true(typeof db.pragma("cache_size")[0].cache_size === "number");
- t.true(typeof db.pragma("cache_size", { simple: true }) === "number");
-});
-
-test("Statement shouldn't bind twice with bind()", async (t) => {
- const [db] = await connect(":memory:");
- db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
- let stmt = db.prepare("SELECT * FROM users WHERE name = ?").bind("Alice");
-
- for (const row of stmt.iterate()) {
- t.truthy(row.name);
- t.true(typeof row.age === "number");
- }
-
- t.throws(
- () => {
- db.bind("Bob");
- },
- { instanceOf: Error },
- );
-});
-
-test("Test pluck(): Rows should only have the values of the first column", async (t) => {
- const [db] = await connect(":memory:");
- db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Bob", 24);
-
- let stmt = db.prepare("SELECT * FROM users").pluck();
-
- for (const row of stmt.iterate()) {
- t.truthy(row);
- t.assert(typeof row === "string");
- }
-});
-
-test("Test raw(): Rows should be returned as arrays", async (t) => {
- const [db] = await connect(":memory:");
- db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Bob", 24);
-
-
- let stmt = db.prepare("SELECT * FROM users").raw();
-
- for (const row of stmt.iterate()) {
- t.true(Array.isArray(row));
- t.true(typeof row[0] === "string");
- t.true(typeof row[1] === "number");
- }
-
- stmt = db.prepare("SELECT * FROM users WHERE name = ?").raw();
- const row = stmt.get("Alice");
- t.true(Array.isArray(row));
- t.is(row.length, 2);
- t.is(row[0], "Alice");
- t.is(row[1], 42);
-
- const noRow = stmt.get("Charlie");
- t.is(noRow, undefined);
-
- stmt = db.prepare("SELECT * FROM users").raw();
- const rows = stmt.all();
- t.true(Array.isArray(rows));
- t.is(rows.length, 2);
- t.deepEqual(rows[0], ["Alice", 42]);
- t.deepEqual(rows[1], ["Bob", 24]);
-});
-
-test("Presentation modes should be mutually exclusive", async (t) => {
- const [db] = await connect(":memory:");
- db.prepare("CREATE TABLE users (name TEXT, age INTEGER)").run();
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Alice", 42);
- db.prepare("INSERT INTO users (name, age) VALUES (?, ?)").run("Bob", 24);
-
-
- // test raw()
- let stmt = db.prepare("SELECT * FROM users").pluck().raw();
-
- for (const row of stmt.iterate()) {
- t.true(Array.isArray(row));
- t.true(typeof row[0] === "string");
- t.true(typeof row[1] === "number");
- }
-
- stmt = db.prepare("SELECT * FROM users WHERE name = ?").raw();
- const row = stmt.get("Alice");
- t.true(Array.isArray(row));
- t.is(row.length, 2);
- t.is(row[0], "Alice");
- t.is(row[1], 42);
-
- const noRow = stmt.get("Charlie");
- t.is(noRow, undefined);
-
- stmt = db.prepare("SELECT * FROM users").raw();
- const rows = stmt.all();
- t.true(Array.isArray(rows));
- t.is(rows.length, 2);
- t.deepEqual(rows[0], ["Alice", 42]);
- t.deepEqual(rows[1], ["Bob", 24]);
-
- // test pluck()
- stmt = db.prepare("SELECT * FROM users").raw().pluck();
-
- for (const name of stmt.iterate()) {
- t.truthy(name);
- t.assert(typeof name === "string");
- }
-});
-
-test("Test exec(): Should correctly load multiple statements from file", async (t) => {
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = path.dirname(__filename);
-
- const [db] = await connect(":memory:");
- const file = fs.readFileSync(path.resolve(__dirname, "./artifacts/basic-test.sql"), "utf8");
- db.exec(file);
- let rows = db.prepare("SELECT * FROM users").iterate();
- for (const row of rows) {
- t.truthy(row.name);
- t.true(typeof row.age === "number");
- }
-});
-
-test("pragma query", async (t) => {
- const [db] = await connect(":memory:");
- let page_size = db.pragma("page_size");
- let expectedValue = [{ page_size: 4096 }];
- t.deepEqual(page_size, expectedValue);
-});
-
-test("pragma table_list", async (t) => {
- const [db] = await connect(":memory:");
- let param = "sqlite_schema";
- let actual = db.pragma(`table_info(${param})`);
- let expectedValue = [
- { cid: 0, name: "type", type: "TEXT", notnull: 0, dflt_value: null, pk: 0 },
- { cid: 1, name: "name", type: "TEXT", notnull: 0, dflt_value: null, pk: 0 },
- { cid: 2, name: "tbl_name", type: "TEXT", notnull: 0, dflt_value: null, pk: 0 },
- { cid: 3, name: "rootpage", type: "INT", notnull: 0, dflt_value: null, pk: 0 },
- { cid: 4, name: "sql", type: "TEXT", notnull: 0, dflt_value: null, pk: 0 },
- ];
- t.deepEqual(actual, expectedValue);
-});
-
-test("Test Statement.database gets the database object", async t => {
- const [db] = await connect(":memory:");
- let stmt = db.prepare("SELECT 1");
- t.is(stmt.database, db);
-});
-
-test("Test Statement.source", async t => {
- const [db] = await connect(":memory:");
- let sql = "CREATE TABLE t (id int)";
- let stmt = db.prepare(sql);
- t.is(stmt.source, sql);
-});
-
-test("simple pragma table_list", async (t) => {
- const [db] = await connect(":memory:");
- let param = "sqlite_schema";
- let actual = db.pragma(`table_info(${param})`, { simple: true });
- let expectedValue = 0;
- t.deepEqual(actual, expectedValue);
-});
-
-const connect = async (path) => {
- const db = new Database(path);
- return [db];
-};
diff --git a/bindings/javascript/__test__/sync.spec.mjs b/bindings/javascript/__test__/sync.spec.mjs
index f8c016021..87fe1d7d4 100644
--- a/bindings/javascript/__test__/sync.spec.mjs
+++ b/bindings/javascript/__test__/sync.spec.mjs
@@ -377,7 +377,7 @@ dualTest.both("Database.pragma()", async (t) => {
t.deepEqual(db.pragma("cache_size"), [{ "cache_size": 2000 }]);
});
-dualTest.onlySqlitePasses("errors", async (t) => {
+dualTest.both("errors", async (t) => {
const db = t.context.db;
const syntaxError = await t.throws(() => {
@@ -385,7 +385,7 @@ dualTest.onlySqlitePasses("errors", async (t) => {
}, {
any: true,
instanceOf: t.context.errorType,
- message: 'near "SYNTAX": syntax error',
+ message: /near "SYNTAX": syntax error/,
code: 'SQLITE_ERROR'
});
const noTableError = await t.throws(() => {
@@ -393,7 +393,7 @@ dualTest.onlySqlitePasses("errors", async (t) => {
}, {
any: true,
instanceOf: t.context.errorType,
- message: "no such table: missing_table",
+ message: /(Parse error: Table missing_table not found|no such table: missing_table)/,
code: 'SQLITE_ERROR'
});
diff --git a/bindings/javascript/index.d.ts b/bindings/javascript/index.d.ts
index 99433b962..37041f67a 100644
--- a/bindings/javascript/index.d.ts
+++ b/bindings/javascript/index.d.ts
@@ -3,41 +3,41 @@
/* auto-generated by NAPI-RS */
-export interface Options {
- readonly: boolean
- fileMustExist: boolean
- timeout: number
+export interface OpenDatabaseOptions {
+ readonly?: boolean
+ fileMustExist?: boolean
+ timeout?: number
+}
+export interface PragmaOptions {
+ simple: boolean
}
export declare class Database {
memory: boolean
readonly: boolean
- inTransaction: boolean
open: boolean
name: string
- constructor(path: string, options?: Options | undefined | null)
+ constructor(path: string, options?: OpenDatabaseOptions | undefined | null)
prepare(sql: string): Statement
- transaction(): void
- pragma(): void
+ pragma(pragmaName: string, options?: PragmaOptions | undefined | null): unknown
backup(): void
serialize(): void
function(): void
aggregate(): void
table(): void
- loadExtension(): void
+ loadExtension(path: string): void
+ exec(sql: string): void
+ close(): void
}
export declare class Statement {
- database: Database
source: string
- reader: boolean
- readonly: boolean
- busy: boolean
- get(): unknown
- all(): NapiResult
- run(args: Array): void
- static iterate(): void
- static pluck(): void
+  get(args?: Array<unknown> | undefined | null): unknown
+  run(args?: Array<unknown> | undefined | null): unknown
+  iterate(args?: Array<unknown> | undefined | null): IteratorStatement
+  all(args?: Array<unknown> | undefined | null): unknown
+ pluck(pluck?: boolean | undefined | null): void
static expand(): void
- static raw(): void
+ raw(raw?: boolean | undefined | null): void
static columns(): void
- static bind(): void
+  bind(args?: Array<unknown> | undefined | null): Statement
}
+export declare class IteratorStatement { }
diff --git a/bindings/javascript/index.js b/bindings/javascript/index.js
index 4e9bf54a7..c1f087ea5 100644
--- a/bindings/javascript/index.js
+++ b/bindings/javascript/index.js
@@ -5,325 +5,313 @@
/* auto-generated by NAPI-RS */
const { existsSync, readFileSync } = require('fs')
-const { join } = require("path");
+const { join } = require('path')
-const { platform, arch } = process;
+const { platform, arch } = process
-let nativeBinding = null;
-let localFileExisted = false;
-let loadError = null;
+let nativeBinding = null
+let localFileExisted = false
+let loadError = null
function isMusl() {
// For Node 10
- if (!process.report || typeof process.report.getReport !== "function") {
+ if (!process.report || typeof process.report.getReport !== 'function') {
try {
- const lddPath = require("child_process")
- .execSync("which ldd")
- .toString()
- .trim();
- return readFileSync(lddPath, "utf8").includes("musl");
+ const lddPath = require('child_process').execSync('which ldd').toString().trim()
+ return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
- return true;
+ return true
}
} else {
- const { glibcVersionRuntime } = process.report.getReport().header;
- return !glibcVersionRuntime;
+ const { glibcVersionRuntime } = process.report.getReport().header
+ return !glibcVersionRuntime
}
}
switch (platform) {
- case "android":
+ case 'android':
switch (arch) {
- case "arm64":
- localFileExisted = existsSync(
- join(__dirname, "turso.android-arm64.node"),
- );
+ case 'arm64':
+ localFileExisted = existsSync(join(__dirname, 'turso.android-arm64.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.android-arm64.node");
+ nativeBinding = require('./turso.android-arm64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-android-arm64");
+ nativeBinding = require('@tursodatabase/turso-android-arm64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "arm":
- localFileExisted = existsSync(
- join(__dirname, "turso.android-arm-eabi.node"),
- );
+ break
+ case 'arm':
+ localFileExisted = existsSync(join(__dirname, 'turso.android-arm-eabi.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.android-arm-eabi.node");
+ nativeBinding = require('./turso.android-arm-eabi.node')
} else {
- nativeBinding = require("@tursodatabase/turso-android-arm-eabi");
+ nativeBinding = require('@tursodatabase/turso-android-arm-eabi')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on Android ${arch}`);
+ throw new Error(`Unsupported architecture on Android ${arch}`)
}
- break;
- case "win32":
+ break
+ case 'win32':
switch (arch) {
- case "x64":
+ case 'x64':
localFileExisted = existsSync(
- join(__dirname, "turso.win32-x64-msvc.node"),
- );
+ join(__dirname, 'turso.win32-x64-msvc.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.win32-x64-msvc.node");
+ nativeBinding = require('./turso.win32-x64-msvc.node')
} else {
- nativeBinding = require("@tursodatabase/turso-win32-x64-msvc");
+ nativeBinding = require('@tursodatabase/turso-win32-x64-msvc')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "ia32":
+ break
+ case 'ia32':
localFileExisted = existsSync(
- join(__dirname, "turso.win32-ia32-msvc.node"),
- );
+ join(__dirname, 'turso.win32-ia32-msvc.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.win32-ia32-msvc.node");
+ nativeBinding = require('./turso.win32-ia32-msvc.node')
} else {
- nativeBinding = require("@tursodatabase/turso-win32-ia32-msvc");
+ nativeBinding = require('@tursodatabase/turso-win32-ia32-msvc')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "arm64":
+ break
+ case 'arm64':
localFileExisted = existsSync(
- join(__dirname, "turso.win32-arm64-msvc.node"),
- );
+ join(__dirname, 'turso.win32-arm64-msvc.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.win32-arm64-msvc.node");
+ nativeBinding = require('./turso.win32-arm64-msvc.node')
} else {
- nativeBinding = require("@tursodatabase/turso-win32-arm64-msvc");
+ nativeBinding = require('@tursodatabase/turso-win32-arm64-msvc')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on Windows: ${arch}`);
+ throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
- break;
- case "darwin":
- localFileExisted = existsSync(
- join(__dirname, "turso.darwin-universal.node"),
- );
+ break
+ case 'darwin':
+ localFileExisted = existsSync(join(__dirname, 'turso.darwin-universal.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.darwin-universal.node");
+ nativeBinding = require('./turso.darwin-universal.node')
} else {
- nativeBinding = require("@tursodatabase/turso-darwin-universal");
+ nativeBinding = require('@tursodatabase/turso-darwin-universal')
}
- break;
+ break
} catch {}
switch (arch) {
- case "x64":
- localFileExisted = existsSync(
- join(__dirname, "turso.darwin-x64.node"),
- );
+ case 'x64':
+ localFileExisted = existsSync(join(__dirname, 'turso.darwin-x64.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.darwin-x64.node");
+ nativeBinding = require('./turso.darwin-x64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-darwin-x64");
+ nativeBinding = require('@tursodatabase/turso-darwin-x64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "arm64":
+ break
+ case 'arm64':
localFileExisted = existsSync(
- join(__dirname, "turso.darwin-arm64.node"),
- );
+ join(__dirname, 'turso.darwin-arm64.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.darwin-arm64.node");
+ nativeBinding = require('./turso.darwin-arm64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-darwin-arm64");
+ nativeBinding = require('@tursodatabase/turso-darwin-arm64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on macOS: ${arch}`);
+ throw new Error(`Unsupported architecture on macOS: ${arch}`)
}
- break;
- case "freebsd":
- if (arch !== "x64") {
- throw new Error(`Unsupported architecture on FreeBSD: ${arch}`);
+ break
+ case 'freebsd':
+ if (arch !== 'x64') {
+ throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
}
- localFileExisted = existsSync(
- join(__dirname, "turso.freebsd-x64.node"),
- );
+ localFileExisted = existsSync(join(__dirname, 'turso.freebsd-x64.node'))
try {
if (localFileExisted) {
- nativeBinding = require("./turso.freebsd-x64.node");
+ nativeBinding = require('./turso.freebsd-x64.node')
} else {
- nativeBinding = require("@tursodatabase/turso-freebsd-x64");
+ nativeBinding = require('@tursodatabase/turso-freebsd-x64')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
- case "linux":
+ break
+ case 'linux':
switch (arch) {
- case "x64":
+ case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-x64-musl.node"),
- );
+ join(__dirname, 'turso.linux-x64-musl.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-x64-musl.node");
+ nativeBinding = require('./turso.linux-x64-musl.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-x64-musl");
+ nativeBinding = require('@tursodatabase/turso-linux-x64-musl')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-x64-gnu.node"),
- );
+ join(__dirname, 'turso.linux-x64-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-x64-gnu.node");
+ nativeBinding = require('./turso.linux-x64-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-x64-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-x64-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "arm64":
+ break
+ case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm64-musl.node"),
- );
+ join(__dirname, 'turso.linux-arm64-musl.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm64-musl.node");
+ nativeBinding = require('./turso.linux-arm64-musl.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm64-musl");
+ nativeBinding = require('@tursodatabase/turso-linux-arm64-musl')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm64-gnu.node"),
- );
+ join(__dirname, 'turso.linux-arm64-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm64-gnu.node");
+ nativeBinding = require('./turso.linux-arm64-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm64-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-arm64-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "arm":
+ break
+ case 'arm':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm-musleabihf.node"),
- );
+ join(__dirname, 'turso.linux-arm-musleabihf.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm-musleabihf.node");
+ nativeBinding = require('./turso.linux-arm-musleabihf.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm-musleabihf");
+ nativeBinding = require('@tursodatabase/turso-linux-arm-musleabihf')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-arm-gnueabihf.node"),
- );
+ join(__dirname, 'turso.linux-arm-gnueabihf.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-arm-gnueabihf.node");
+ nativeBinding = require('./turso.linux-arm-gnueabihf.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-arm-gnueabihf");
+ nativeBinding = require('@tursodatabase/turso-linux-arm-gnueabihf')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "riscv64":
+ break
+ case 'riscv64':
if (isMusl()) {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-riscv64-musl.node"),
- );
+ join(__dirname, 'turso.linux-riscv64-musl.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-riscv64-musl.node");
+ nativeBinding = require('./turso.linux-riscv64-musl.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-riscv64-musl");
+ nativeBinding = require('@tursodatabase/turso-linux-riscv64-musl')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
} else {
localFileExisted = existsSync(
- join(__dirname, "turso.linux-riscv64-gnu.node"),
- );
+ join(__dirname, 'turso.linux-riscv64-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-riscv64-gnu.node");
+ nativeBinding = require('./turso.linux-riscv64-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-riscv64-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-riscv64-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
}
- break;
- case "s390x":
+ break
+ case 's390x':
localFileExisted = existsSync(
- join(__dirname, "turso.linux-s390x-gnu.node"),
- );
+ join(__dirname, 'turso.linux-s390x-gnu.node')
+ )
try {
if (localFileExisted) {
- nativeBinding = require("./turso.linux-s390x-gnu.node");
+ nativeBinding = require('./turso.linux-s390x-gnu.node')
} else {
- nativeBinding = require("@tursodatabase/turso-linux-s390x-gnu");
+ nativeBinding = require('@tursodatabase/turso-linux-s390x-gnu')
}
} catch (e) {
- loadError = e;
+ loadError = e
}
- break;
+ break
default:
- throw new Error(`Unsupported architecture on Linux: ${arch}`);
+ throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
- break;
+ break
default:
- throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`);
+ throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
if (!nativeBinding) {
if (loadError) {
- throw loadError;
+ throw loadError
}
- throw new Error(`Failed to load native binding`);
+ throw new Error(`Failed to load native binding`)
}
-const { Database, Statement } = nativeBinding;
+const { Database, Statement, IteratorStatement } = nativeBinding
-module.exports.Database = Database;
-module.exports.Statement = Statement;
+module.exports.Database = Database
+module.exports.Statement = Statement
+module.exports.IteratorStatement = IteratorStatement
diff --git a/bindings/javascript/npm/darwin-universal/package.json b/bindings/javascript/npm/darwin-universal/package.json
index c5047c2bb..d0d67e532 100644
--- a/bindings/javascript/npm/darwin-universal/package.json
+++ b/bindings/javascript/npm/darwin-universal/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso-darwin-universal",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
diff --git a/bindings/javascript/npm/linux-x64-gnu/package.json b/bindings/javascript/npm/linux-x64-gnu/package.json
index aa3d65f33..41f793ddb 100644
--- a/bindings/javascript/npm/linux-x64-gnu/package.json
+++ b/bindings/javascript/npm/linux-x64-gnu/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso-linux-x64-gnu",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
diff --git a/bindings/javascript/npm/win32-x64-msvc/package.json b/bindings/javascript/npm/win32-x64-msvc/package.json
index 0b4bac4cb..f5339ea01 100644
--- a/bindings/javascript/npm/win32-x64-msvc/package.json
+++ b/bindings/javascript/npm/win32-x64-msvc/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso-win32-x64-msvc",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
diff --git a/bindings/javascript/package-lock.json b/bindings/javascript/package-lock.json
index 98014d52b..93b39fe9c 100644
--- a/bindings/javascript/package-lock.json
+++ b/bindings/javascript/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "@tursodatabase/turso",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@tursodatabase/turso",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"license": "MIT",
"devDependencies": {
"@napi-rs/cli": "^2.18.4",
diff --git a/bindings/javascript/package.json b/bindings/javascript/package.json
index 809015934..fbacd7543 100644
--- a/bindings/javascript/package.json
+++ b/bindings/javascript/package.json
@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/turso",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
@@ -42,4 +42,4 @@
"version": "napi version"
},
"packageManager": "yarn@4.6.0"
-}
+}
\ No newline at end of file
diff --git a/bindings/javascript/sqlite-error.js b/bindings/javascript/sqlite-error.js
new file mode 100644
index 000000000..82356bc36
--- /dev/null
+++ b/bindings/javascript/sqlite-error.js
@@ -0,0 +1,22 @@
+'use strict';
+const descriptor = { value: 'SqliteError', writable: true, enumerable: false, configurable: true };
+
+function SqliteError(message, code, rawCode) {
+ if (new.target !== SqliteError) {
+ return new SqliteError(message, code);
+ }
+ if (typeof code !== 'string') {
+ throw new TypeError('Expected second argument to be a string');
+ }
+ Error.call(this, message);
+ descriptor.value = '' + message;
+ Object.defineProperty(this, 'message', descriptor);
+ Error.captureStackTrace(this, SqliteError);
+ this.code = code;
+ this.rawCode = rawCode
+}
+Object.setPrototypeOf(SqliteError, Error);
+Object.setPrototypeOf(SqliteError.prototype, Error.prototype);
+Object.defineProperty(SqliteError.prototype, 'name', descriptor);
+module.exports = SqliteError;
+
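Below is a minimal usage sketch (not part of the diff) of the SqliteError class added above; the message, code, and rawCode values are illustrative only:

```js
const SqliteError = require('./sqlite-error.js');

// Construct an error the way the wrapper does after a failed native call.
const err = new SqliteError('no such table: users', 'SQLITE_ERROR', 1);

console.log(err instanceof SqliteError); // true
console.log(err instanceof Error);       // true, prototype chain is re-parented onto Error
console.log(err.name);                   // 'SqliteError'
console.log(err.code);                   // 'SQLITE_ERROR'
console.log(err.rawCode);                // 1

// The constructor guards its second argument: a non-string code is rejected.
try {
  new SqliteError('boom', 42);
} catch (e) {
  console.log(e instanceof TypeError);   // true
}
```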
diff --git a/bindings/javascript/src/lib.rs b/bindings/javascript/src/lib.rs
index 248c240b4..15c32940f 100644
--- a/bindings/javascript/src/lib.rs
+++ b/bindings/javascript/src/lib.rs
@@ -14,12 +14,18 @@ use turso_core::{LimboError, StepResult};
#[derive(Default)]
#[napi(object)]
pub struct OpenDatabaseOptions {
- pub readonly: bool,
- pub file_must_exist: bool,
- pub timeout: u32,
+ pub readonly: Option<bool>,
+ pub file_must_exist: Option<bool>,
+ pub timeout: Option<u32>,
// verbose => Callback,
}
+impl OpenDatabaseOptions {
+ fn readonly(&self) -> bool {
+ self.readonly.unwrap_or(false)
+ }
+}
+
#[napi(object)]
pub struct PragmaOptions {
pub simple: bool,
@@ -41,7 +47,7 @@ pub struct Database {
pub name: String,
_db: Arc,
conn: Arc,
- io: Arc,
+ _io: Arc,
}
impl ObjectFinalize for Database {
@@ -55,34 +61,36 @@ impl ObjectFinalize for Database {
#[napi]
impl Database {
#[napi(constructor)]
- pub fn new(path: String, options: Option) -> napi::Result {
+ pub fn new(path: String, options: Option) -> napi::Result {
let memory = path == ":memory:";
let io: Arc = if memory {
Arc::new(turso_core::MemoryIO::new())
} else {
- Arc::new(turso_core::PlatformIO::new().map_err(into_napi_error)?)
+ Arc::new(turso_core::PlatformIO::new().map_err(into_napi_sqlite_error)?)
};
let opts = options.unwrap_or_default();
- let flag = if opts.readonly {
+ let flag = if opts.readonly() {
turso_core::OpenFlags::ReadOnly
} else {
turso_core::OpenFlags::Create
};
- let file = io.open_file(&path, flag, false).map_err(into_napi_error)?;
+ let file = io
+ .open_file(&path, flag, false)
+ .map_err(|err| into_napi_error_with_message("SQLITE_CANTOPEN".to_owned(), err))?;
let db_file = Arc::new(DatabaseFile::new(file));
let db = turso_core::Database::open(io.clone(), &path, db_file, false, false)
- .map_err(into_napi_error)?;
- let conn = db.connect().map_err(into_napi_error)?;
+ .map_err(into_napi_sqlite_error)?;
+ let conn = db.connect().map_err(into_napi_sqlite_error)?;
Ok(Self {
- readonly: opts.readonly,
+ readonly: opts.readonly(),
memory,
_db: db,
conn,
open: true,
name: path,
- io,
+ _io: io,
})
}
@@ -114,7 +122,7 @@ impl Database {
return Ok(env.get_undefined()?.into_unknown())
}
turso_core::StepResult::IO => {
- self.io.run_once().map_err(into_napi_error)?;
+ stmt.run_once().map_err(into_napi_error)?;
continue;
}
step @ turso_core::StepResult::Interrupt
@@ -131,16 +139,6 @@ impl Database {
}
}
- #[napi]
- pub fn readonly(&self) -> bool {
- self.readonly
- }
-
- #[napi]
- pub fn open(&self) -> bool {
- self.open
- }
-
#[napi]
pub fn backup(&self) {
todo!()
@@ -176,7 +174,7 @@ impl Database {
}
#[napi]
- pub fn exec(&self, sql: String) -> napi::Result<()> {
+ pub fn exec(&self, sql: String) -> napi::Result<(), String> {
let query_runner = self.conn.query_runner(sql.as_bytes());
// Since exec doesn't return any values, we can just iterate over the results
@@ -185,17 +183,17 @@ impl Database {
Ok(Some(mut stmt)) => loop {
match stmt.step() {
Ok(StepResult::Row) => continue,
- Ok(StepResult::IO) => self.io.run_once().map_err(into_napi_error)?,
+ Ok(StepResult::IO) => stmt.run_once().map_err(into_napi_sqlite_error)?,
Ok(StepResult::Done) => break,
Ok(StepResult::Interrupt | StepResult::Busy) => {
return Err(napi::Error::new(
- napi::Status::GenericFailure,
+ "SQLITE_ERROR".to_owned(),
"Statement execution interrupted or busy".to_string(),
));
}
Err(err) => {
return Err(napi::Error::new(
- napi::Status::GenericFailure,
+ "SQLITE_ERROR".to_owned(),
format!("Error executing SQL: {}", err),
));
}
@@ -204,7 +202,7 @@ impl Database {
Ok(None) => continue,
Err(err) => {
return Err(napi::Error::new(
- napi::Status::GenericFailure,
+ "SQLITE_ERROR".to_owned(),
format!("Error executing SQL: {}", err),
));
}
@@ -263,7 +261,7 @@ impl Statement {
#[napi]
pub fn get(&self, env: Env, args: Option>) -> napi::Result {
- let mut stmt = self.check_and_bind(args)?;
+ let mut stmt = self.check_and_bind(env, args)?;
loop {
let step = stmt.step().map_err(into_napi_error)?;
@@ -308,7 +306,7 @@ impl Statement {
}
turso_core::StepResult::Done => return Ok(env.get_undefined()?.into_unknown()),
turso_core::StepResult::IO => {
- self.database.io.run_once().map_err(into_napi_error)?;
+ stmt.run_once().map_err(into_napi_error)?;
continue;
}
turso_core::StepResult::Interrupt | turso_core::StepResult::Busy => {
@@ -324,7 +322,7 @@ impl Statement {
// TODO: Return Info object (https://github.com/WiseLibs/better-sqlite3/blob/master/docs/api.md#runbindparameters---object)
#[napi]
pub fn run(&self, env: Env, args: Option>) -> napi::Result {
- let stmt = self.check_and_bind(args)?;
+ let stmt = self.check_and_bind(env, args)?;
self.internal_all(env, stmt)
}
@@ -335,10 +333,15 @@ impl Statement {
env: Env,
args: Option>,
) -> napi::Result {
- self.check_and_bind(args)?;
+ if let Some(some_args) = args.as_ref() {
+ if some_args.iter().len() != 0 {
+ self.check_and_bind(env, args)?;
+ }
+ }
+
Ok(IteratorStatement {
stmt: Rc::clone(&self.inner),
- database: self.database.clone(),
+ _database: self.database.clone(),
env,
presentation_mode: self.presentation_mode.clone(),
})
@@ -346,7 +349,7 @@ impl Statement {
#[napi]
pub fn all(&self, env: Env, args: Option>) -> napi::Result {
- let stmt = self.check_and_bind(args)?;
+ let stmt = self.check_and_bind(env, args)?;
self.internal_all(env, stmt)
}
@@ -401,7 +404,7 @@ impl Statement {
break;
}
turso_core::StepResult::IO => {
- self.database.io.run_once().map_err(into_napi_error)?;
+ stmt.run_once().map_err(into_napi_error)?;
}
turso_core::StepResult::Interrupt | turso_core::StepResult::Busy => {
return Err(napi::Error::new(
@@ -444,8 +447,9 @@ impl Statement {
}
#[napi]
- pub fn bind(&mut self, args: Option>) -> napi::Result {
- self.check_and_bind(args)?;
+ pub fn bind(&mut self, env: Env, args: Option>) -> napi::Result {
+ self.check_and_bind(env, args)
+ .map_err(with_sqlite_error_message)?;
self.binded = true;
Ok(self.clone())
@@ -455,16 +459,22 @@ impl Statement {
/// and bind values do variables. The expected type for args is `Option>`
fn check_and_bind(
&self,
+ env: Env,
args: Option>,
) -> napi::Result> {
let mut stmt = self.inner.borrow_mut();
stmt.reset();
if let Some(args) = args {
if self.binded {
- return Err(napi::Error::new(
- napi::Status::InvalidArg,
- "This statement already has bound parameters",
- ));
+ let err = napi::Error::new(
+ into_convertible_type_error_message("TypeError"),
+ "The bind() method can only be invoked once per statement object",
+ );
+ unsafe {
+ napi::JsTypeError::from(err).throw_into(env.raw());
+ }
+
+ return Err(napi::Error::from_status(napi::Status::PendingException));
}
for (i, elem) in args.into_iter().enumerate() {
@@ -480,7 +490,7 @@ impl Statement {
#[napi(iterator)]
pub struct IteratorStatement {
stmt: Rc>,
- database: Database,
+ _database: Database,
env: Env,
presentation_mode: PresentationMode,
}
@@ -528,7 +538,7 @@ impl Generator for IteratorStatement {
}
turso_core::StepResult::Done => return None,
turso_core::StepResult::IO => {
- self.database.io.run_once().ok()?;
+ stmt.run_once().ok()?;
continue;
}
turso_core::StepResult::Interrupt | turso_core::StepResult::Busy => return None,
@@ -630,6 +640,29 @@ impl turso_core::DatabaseStorage for DatabaseFile {
}
#[inline]
-pub fn into_napi_error(limbo_error: LimboError) -> napi::Error {
+fn into_napi_error(limbo_error: LimboError) -> napi::Error {
napi::Error::new(napi::Status::GenericFailure, format!("{limbo_error}"))
}
+
+#[inline]
+fn into_napi_sqlite_error(limbo_error: LimboError) -> napi::Error {
+ napi::Error::new(String::from("SQLITE_ERROR"), format!("{limbo_error}"))
+}
+
+#[inline]
+fn into_napi_error_with_message(
+ error_code: String,
+ limbo_error: LimboError,
+) -> napi::Error {
+ napi::Error::new(error_code, format!("{limbo_error}"))
+}
+
+#[inline]
+fn with_sqlite_error_message(err: napi::Error) -> napi::Error {
+ napi::Error::new("SQLITE_ERROR".to_owned(), err.reason)
+}
+
+#[inline]
+fn into_convertible_type_error_message(error_type: &str) -> String {
+ "[TURSO_CONVERT_TYPE]".to_owned() + error_type
+}
diff --git a/bindings/javascript/wrapper.js b/bindings/javascript/wrapper.js
index c42e1246d..0d4c53c96 100644
--- a/bindings/javascript/wrapper.js
+++ b/bindings/javascript/wrapper.js
@@ -2,6 +2,28 @@
const { Database: NativeDB } = require("./index.js");
+const SqliteError = require("./sqlite-error.js");
+
+const convertibleErrorTypes = { TypeError };
+const CONVERTIBLE_ERROR_PREFIX = '[TURSO_CONVERT_TYPE]';
+
+function convertError(err) {
+ if ((err.code ?? '').startsWith(CONVERTIBLE_ERROR_PREFIX)) {
+ return createErrorByName(err.code.substring(CONVERTIBLE_ERROR_PREFIX.length), err.message);
+ }
+
+ return new SqliteError(err.message, err.code, err.rawCode);
+}
+
+function createErrorByName(name, message) {
+ const ErrorConstructor = convertibleErrorTypes[name];
+ if (!ErrorConstructor) {
+ throw new Error(`unknown error type ${name} from Turso`);
+ }
+
+ return new ErrorConstructor(message);
+}
+
/**
* Database represents a connection that can prepare and execute SQL statements.
*/
@@ -145,7 +167,11 @@ class Database {
* @param {string} sql - The SQL statement string to execute.
*/
exec(sql) {
- this.db.exec(sql);
+ try {
+ this.db.exec(sql);
+ } catch (err) {
+ throw convertError(err);
+ }
}
/**
@@ -264,8 +290,13 @@ class Statement {
* @returns this - Statement with binded parameters
*/
bind(...bindParameters) {
- return this.stmt.bind(bindParameters.flat());
+ try {
+ return new Statement(this.stmt.bind(bindParameters.flat()), this.db);
+ } catch (err) {
+ throw convertError(err);
+ }
}
}
module.exports = Database;
+module.exports.SqliteError = SqliteError;
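Below is a minimal sketch (not part of the diff) of how the error-conversion path above is expected to behave from the caller's side. It assumes the wrapper's Database constructor and prepare() follow the better-sqlite3-style API documented elsewhere in wrapper.js; the SQL and table name are illustrative only:

```js
const Database = require('./wrapper.js');
const { SqliteError } = Database;

const db = new Database(':memory:');

// Native failures are rethrown as SqliteError by convertError().
try {
  db.exec('INSERT INTO missing_table VALUES (1)');
} catch (err) {
  console.log(err instanceof SqliteError); // true
  console.log(err.code);                   // e.g. 'SQLITE_ERROR'
}

// bind() may only be invoked once per statement; the native layer tags that
// error with the [TURSO_CONVERT_TYPE]TypeError code, which convertError()
// maps back to a plain TypeError.
const stmt = db.prepare('SELECT ?').bind(1);
try {
  stmt.bind(2);
} catch (err) {
  console.log(err instanceof TypeError); // true
}
```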
diff --git a/bindings/python/src/lib.rs b/bindings/python/src/lib.rs
index 83adf54c3..61693fb51 100644
--- a/bindings/python/src/lib.rs
+++ b/bindings/python/src/lib.rs
@@ -93,17 +93,24 @@ impl Cursor {
Ok::<(), anyhow::Error>(())
})?;
+ if stmt_is_dml && self.conn.conn.get_auto_commit() {
+ self.conn.conn.execute("BEGIN").map_err(|e| {
+ PyErr::new::(format!(
+ "Failed to start transaction after DDL: {:?}",
+ e
+ ))
+ })?;
+ }
+
// For DDL and DML statements,
// we need to execute the statement immediately
if stmt_is_ddl || stmt_is_dml || stmt_is_tx {
+ let mut stmt = stmt.borrow_mut();
while let turso_core::StepResult::IO = stmt
- .borrow_mut()
.step()
.map_err(|e| PyErr::new::(format!("Step error: {:?}", e)))?
{
- self.conn
- .io
- .run_once()
+ stmt.run_once()
.map_err(|e| PyErr::new::(format!("IO error: {:?}", e)))?;
}
}
@@ -132,7 +139,7 @@ impl Cursor {
return Ok(Some(py_row));
}
turso_core::StepResult::IO => {
- self.conn.io.run_once().map_err(|e| {
+ stmt.run_once().map_err(|e| {
PyErr::new::(format!("IO error: {:?}", e))
})?;
}
@@ -168,7 +175,7 @@ impl Cursor {
results.push(py_row);
}
turso_core::StepResult::IO => {
- self.conn.io.run_once().map_err(|e| {
+ stmt.run_once().map_err(|e| {
PyErr::new::(format!("IO error: {:?}", e))
})?;
}
@@ -233,7 +240,7 @@ fn stmt_is_tx(sql: &str) -> bool {
#[derive(Clone)]
pub struct Connection {
conn: Arc,
- io: Arc,
+ _io: Arc,
}
#[pymethods]
@@ -298,9 +305,11 @@ impl Connection {
impl Drop for Connection {
fn drop(&mut self) {
- self.conn
- .close()
- .expect("Failed to drop (close) connection");
+ if Arc::strong_count(&self.conn) == 1 {
+ self.conn
+ .close()
+ .expect("Failed to drop (close) connection");
+ }
}
}
@@ -308,7 +317,7 @@ impl Drop for Connection {
#[pyfunction]
pub fn connect(path: &str) -> Result {
match turso_core::Connection::from_uri(path, false, false) {
- Ok((io, conn)) => Ok(Connection { conn, io }),
+ Ok((io, conn)) => Ok(Connection { conn, _io: io }),
Err(e) => Err(PyErr::new::(format!(
"Failed to create connection: {:?}",
e
diff --git a/bindings/python/tests/test_database.py b/bindings/python/tests/test_database.py
index c9e1209dd..78c6987d0 100644
--- a/bindings/python/tests/test_database.py
+++ b/bindings/python/tests/test_database.py
@@ -158,6 +158,25 @@ def test_commit(provider):
assert record
+# Test case for: https://github.com/tursodatabase/turso/issues/2002
+@pytest.mark.parametrize("provider", ["sqlite3", "turso"])
+def test_first_rollback(provider, tmp_path):
+ db_file = tmp_path / "test_first_rollback.db"
+
+ conn = connect(provider, str(db_file))
+ cur = conn.cursor()
+ cur.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, username TEXT)")
+ cur.execute("INSERT INTO users VALUES (1, 'alice')")
+ cur.execute("INSERT INTO users VALUES (2, 'bob')")
+
+ conn.rollback()
+
+ cur.execute("SELECT * FROM users")
+ users = cur.fetchall()
+
+ assert users == []
+ conn.close()
+
@pytest.mark.parametrize("provider", ["sqlite3", "turso"])
def test_with_statement(provider):
with connect(provider, "tests/database.db") as conn:
diff --git a/bindings/rust/src/lib.rs b/bindings/rust/src/lib.rs
index 465ca1ca9..006846f92 100644
--- a/bindings/rust/src/lib.rs
+++ b/bindings/rust/src/lib.rs
@@ -569,7 +569,7 @@ mod tests {
Ok(_) => panic!("Query succeeded after WAL deletion and DB reopen, but was expected to fail because the table definition should have been in the WAL."),
Err(Error::SqlExecutionFailure(msg)) => {
assert!(
- msg.contains("test_large_persistence not found"),
+ msg.contains("no such table: test_large_persistence"),
"Expected 'test_large_persistence not found' error, but got: {}",
msg
);
diff --git a/bindings/wasm/package-lock.json b/bindings/wasm/package-lock.json
index 93ef4f5fd..4ddc87922 100644
--- a/bindings/wasm/package-lock.json
+++ b/bindings/wasm/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "limbo-wasm",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "limbo-wasm",
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"license": "MIT",
"devDependencies": {
"@playwright/test": "^1.49.1",
diff --git a/bindings/wasm/package.json b/bindings/wasm/package.json
index fa71211c2..463313751 100644
--- a/bindings/wasm/package.json
+++ b/bindings/wasm/package.json
@@ -3,7 +3,7 @@
"collaborators": [
"the Limbo authors"
],
- "version": "0.1.1",
+ "version": "0.1.2-pre.2",
"license": "MIT",
"repository": {
"type": "git",
diff --git a/cli/app.rs b/cli/app.rs
index f6cec8687..8136f16a5 100644
--- a/cli/app.rs
+++ b/cli/app.rs
@@ -1,6 +1,6 @@
use crate::{
commands::{
- args::{EchoMode, TimerMode},
+ args::{EchoMode, HeadersMode, TimerMode},
import::ImportFile,
Command, CommandParser,
},
@@ -24,6 +24,7 @@ use std::{
},
time::{Duration, Instant},
};
+use tracing::level_filters::LevelFilter;
use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
use turso_core::{Connection, Database, LimboError, OpenFlags, Statement, StepResult, Value};
@@ -95,7 +96,7 @@ macro_rules! query_internal {
$body(row)?;
}
StepResult::IO => {
- $self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
@@ -175,7 +176,6 @@ impl Limbo {
pub fn with_readline(mut self, mut rl: Editor) -> Self {
let h = LimboHelper::new(
self.conn.clone(),
- self.io.clone(),
self.config.as_ref().map(|c| c.highlight.clone()),
);
rl.set_helper(Some(h));
@@ -644,8 +644,7 @@ impl Limbo {
let _ = self.show_info();
}
Command::Import(args) => {
- let mut import_file =
- ImportFile::new(self.conn.clone(), self.io.clone(), &mut self.writer);
+ let mut import_file = ImportFile::new(self.conn.clone(), &mut self.writer);
import_file.import(args)
}
Command::LoadExtension(args) => {
@@ -676,6 +675,12 @@ impl Limbo {
TimerMode::Off => false,
};
}
+ Command::Headers(headers_mode) => {
+ self.opts.headers = match headers_mode.mode {
+ HeadersMode::On => true,
+ HeadersMode::Off => false,
+ };
+ }
},
}
}
@@ -688,62 +693,83 @@ impl Limbo {
) -> anyhow::Result<()> {
match output {
Ok(Some(ref mut rows)) => match self.opts.output_mode {
- OutputMode::List => loop {
- if self.interrupt_count.load(Ordering::SeqCst) > 0 {
- println!("Query interrupted.");
- return Ok(());
- }
+ OutputMode::List => {
+ let mut headers_printed = false;
+ loop {
+ if self.interrupt_count.load(Ordering::SeqCst) > 0 {
+ println!("Query interrupted.");
+ return Ok(());
+ }
- let start = Instant::now();
+ let start = Instant::now();
- match rows.step() {
- Ok(StepResult::Row) => {
- if let Some(ref mut stats) = statistics {
- stats.execute_time_elapsed_samples.push(start.elapsed());
- }
- let row = rows.row().unwrap();
- for (i, value) in row.get_values().enumerate() {
- if i > 0 {
- let _ = self.writer.write(b"|");
+ match rows.step() {
+ Ok(StepResult::Row) => {
+ if let Some(ref mut stats) = statistics {
+ stats.execute_time_elapsed_samples.push(start.elapsed());
}
- if matches!(value, Value::Null) {
- let _ = self.writer.write(self.opts.null_value.as_bytes())?;
- } else {
- let _ = self.writer.write(format!("{}", value).as_bytes())?;
+
+ // Print headers if enabled and not already printed
+ if self.opts.headers && !headers_printed {
+ for i in 0..rows.num_columns() {
+ if i > 0 {
+ let _ = self.writer.write(b"|");
+ }
+ let _ =
+ self.writer.write(rows.get_column_name(i).as_bytes());
+ }
+ let _ = self.writeln("");
+ headers_printed = true;
+ }
+
+ let row = rows.row().unwrap();
+ for (i, value) in row.get_values().enumerate() {
+ if i > 0 {
+ let _ = self.writer.write(b"|");
+ }
+ if matches!(value, Value::Null) {
+ let _ =
+ self.writer.write(self.opts.null_value.as_bytes())?;
+ } else {
+ let _ =
+ self.writer.write(format!("{}", value).as_bytes())?;
+ }
+ }
+ let _ = self.writeln("");
+ }
+ Ok(StepResult::IO) => {
+ let start = Instant::now();
+ rows.run_once()?;
+ if let Some(ref mut stats) = statistics {
+ stats.io_time_elapsed_samples.push(start.elapsed());
}
}
- let _ = self.writeln("");
- }
- Ok(StepResult::IO) => {
- let start = Instant::now();
- self.io.run_once()?;
- if let Some(ref mut stats) = statistics {
- stats.io_time_elapsed_samples.push(start.elapsed());
+ Ok(StepResult::Interrupt) => break,
+ Ok(StepResult::Done) => {
+ if let Some(ref mut stats) = statistics {
+ stats.execute_time_elapsed_samples.push(start.elapsed());
+ }
+ break;
}
- }
- Ok(StepResult::Interrupt) => break,
- Ok(StepResult::Done) => {
- if let Some(ref mut stats) = statistics {
- stats.execute_time_elapsed_samples.push(start.elapsed());
+ Ok(StepResult::Busy) => {
+ if let Some(ref mut stats) = statistics {
+ stats.execute_time_elapsed_samples.push(start.elapsed());
+ }
+ let _ = self.writeln("database is busy");
+ break;
}
- break;
- }
- Ok(StepResult::Busy) => {
- if let Some(ref mut stats) = statistics {
- stats.execute_time_elapsed_samples.push(start.elapsed());
+ Err(err) => {
+ if let Some(ref mut stats) = statistics {
+ stats.execute_time_elapsed_samples.push(start.elapsed());
+ }
+ let report =
+ miette::Error::from(err).with_source_code(sql.to_owned());
+ let _ = self.write_fmt(format_args!("{:?}", report));
+ break;
}
- let _ = self.writeln("database is busy");
- break;
- }
- Err(err) => {
- if let Some(ref mut stats) = statistics {
- stats.execute_time_elapsed_samples.push(start.elapsed());
- }
- let _ = self.writeln(err.to_string());
- break;
}
}
- },
+ }
OutputMode::Pretty => {
if self.interrupt_count.load(Ordering::SeqCst) > 0 {
println!("Query interrupted.");
@@ -806,7 +832,7 @@ impl Limbo {
}
Ok(StepResult::IO) => {
let start = Instant::now();
- self.io.run_once()?;
+ rows.run_once()?;
if let Some(ref mut stats) = statistics {
stats.io_time_elapsed_samples.push(start.elapsed());
}
@@ -881,7 +907,12 @@ impl Limbo {
.with_thread_ids(true)
.with_ansi(should_emit_ansi),
)
- .with(EnvFilter::from_default_env().add_directive("rustyline=off".parse().unwrap()))
+ .with(
+ EnvFilter::builder()
+ .with_default_directive(LevelFilter::OFF.into())
+ .from_env_lossy()
+ .add_directive("rustyline=off".parse().unwrap()),
+ )
.try_init()
{
println!("Unable to setup tracing appender: {:?}", e);
@@ -913,7 +944,7 @@ impl Limbo {
}
}
StepResult::IO => {
- self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
@@ -969,7 +1000,7 @@ impl Limbo {
}
}
StepResult::IO => {
- self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
@@ -1020,7 +1051,7 @@ impl Limbo {
}
}
StepResult::IO => {
- self.io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => break,
StepResult::Done => break,
diff --git a/cli/commands/args.rs b/cli/commands/args.rs
index 4c36e6ef6..2ee467fe2 100644
--- a/cli/commands/args.rs
+++ b/cli/commands/args.rs
@@ -124,3 +124,14 @@ pub struct TimerArgs {
#[arg(value_enum)]
pub mode: TimerMode,
}
+
+#[derive(Debug, Clone, Args)]
+pub struct HeadersArgs {
+ pub mode: HeadersMode,
+}
+
+#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq)]
+pub enum HeadersMode {
+ On,
+ Off,
+}
diff --git a/cli/commands/import.rs b/cli/commands/import.rs
index eee0b57d1..536dbcb24 100644
--- a/cli/commands/import.rs
+++ b/cli/commands/import.rs
@@ -21,17 +21,12 @@ pub struct ImportArgs {
pub struct ImportFile<'a> {
conn: Arc,
- io: Arc,
writer: &'a mut dyn Write,
}
impl<'a> ImportFile<'a> {
- pub fn new(
- conn: Arc,
- io: Arc,
- writer: &'a mut dyn Write,
- ) -> Self {
- Self { conn, io, writer }
+ pub fn new(conn: Arc, writer: &'a mut dyn Write) -> Self {
+ Self { conn, writer }
}
pub fn import(&mut self, args: ImportArgs) {
@@ -79,7 +74,7 @@ impl<'a> ImportFile<'a> {
while let Ok(x) = rows.step() {
match x {
turso_core::StepResult::IO => {
- self.io.run_once().unwrap();
+ rows.run_once().unwrap();
}
turso_core::StepResult::Done => break,
turso_core::StepResult::Interrupt => break,
diff --git a/cli/commands/mod.rs b/cli/commands/mod.rs
index a4a9a8d43..86c4dd476 100644
--- a/cli/commands/mod.rs
+++ b/cli/commands/mod.rs
@@ -2,8 +2,8 @@ pub mod args;
pub mod import;
use args::{
- CwdArgs, EchoArgs, ExitArgs, IndexesArgs, LoadExtensionArgs, NullValueArgs, OpcodesArgs,
- OpenArgs, OutputModeArgs, SchemaArgs, SetOutputArgs, TablesArgs, TimerArgs,
+ CwdArgs, EchoArgs, ExitArgs, HeadersArgs, IndexesArgs, LoadExtensionArgs, NullValueArgs,
+ OpcodesArgs, OpenArgs, OutputModeArgs, SchemaArgs, SetOutputArgs, TablesArgs, TimerArgs,
};
use clap::Parser;
use import::ImportArgs;
@@ -77,6 +77,9 @@ pub enum Command {
ListIndexes(IndexesArgs),
#[command(name = "timer", display_name = ".timer")]
Timer(TimerArgs),
+ /// Toggle column headers on/off in list mode
+ #[command(name = "headers", display_name = ".headers")]
+ Headers(HeadersArgs),
}
const _HELP_TEMPLATE: &str = "{before-help}{name}
diff --git a/cli/helper.rs b/cli/helper.rs
index 6076e1d0f..aee154662 100644
--- a/cli/helper.rs
+++ b/cli/helper.rs
@@ -40,11 +40,7 @@ pub struct LimboHelper {
}
impl LimboHelper {
- pub fn new(
- conn: Arc,
- io: Arc,
- syntax_config: Option,
- ) -> Self {
+ pub fn new(conn: Arc, syntax_config: Option) -> Self {
// Load only predefined syntax
let ps = from_uncompressed_data(include_bytes!(concat!(
env!("OUT_DIR"),
@@ -59,7 +55,7 @@ impl LimboHelper {
}
}
LimboHelper {
- completer: SqlCompleter::new(conn, io),
+ completer: SqlCompleter::new(conn),
syntax_set: ps,
theme_set: ts,
syntax_config: syntax_config.unwrap_or_default(),
@@ -141,7 +137,6 @@ impl Highlighter for LimboHelper {
pub struct SqlCompleter {
conn: Arc,
- io: Arc,
// Has to be a ref cell as Rustyline takes immutable reference to self
// This problem would be solved with Reedline as it uses &mut self for completions
cmd: RefCell,
@@ -149,10 +144,9 @@ pub struct SqlCompleter {
}
impl SqlCompleter {
- pub fn new(conn: Arc, io: Arc) -> Self {
+ pub fn new(conn: Arc) -> Self {
Self {
conn,
- io,
cmd: C::command().into(),
_cmd_phantom: PhantomData,
}
@@ -228,7 +222,7 @@ impl SqlCompleter {
candidates.push(pair);
}
StepResult::IO => {
- try_result!(self.io.run_once(), (prefix_pos, candidates));
+ try_result!(rows.run_once(), (prefix_pos, candidates));
}
StepResult::Interrupt => break,
StepResult::Done => break,
diff --git a/cli/input.rs b/cli/input.rs
index 1ade1528f..deb659758 100644
--- a/cli/input.rs
+++ b/cli/input.rs
@@ -83,6 +83,7 @@ pub struct Settings {
pub io: Io,
pub tracing_output: Option,
pub timer: bool,
+ pub headers: bool,
}
impl From for Settings {
@@ -107,6 +108,7 @@ impl From for Settings {
},
tracing_output: opts.tracing_output,
timer: false,
+ headers: false,
}
}
}
@@ -115,7 +117,7 @@ impl std::fmt::Display for Settings {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
- "Settings:\nOutput mode: {}\nDB: {}\nOutput: {}\nNull value: {}\nCWD: {}\nEcho: {}",
+ "Settings:\nOutput mode: {}\nDB: {}\nOutput: {}\nNull value: {}\nCWD: {}\nEcho: {}\nHeaders: {}",
self.output_mode,
self.db_file,
match self.is_stdout {
@@ -127,6 +129,10 @@ impl std::fmt::Display for Settings {
match self.echo {
true => "on",
false => "off",
+ },
+ match self.headers {
+ true => "on",
+ false => "off",
}
)
}
@@ -221,6 +227,12 @@ pub const AFTER_HELP_MSG: &str = r#"Usage Examples:
14. To show names of indexes:
.indexes ?TABLE?
+15. To turn on column headers in list mode:
+ .headers on
+
+16. To turn off column headers in list mode:
+ .headers off
+
Note:
- All SQL commands must end with a semicolon (;).
- Special commands start with a dot (.) and are not required to end with a semicolon."#;
diff --git a/core/benches/benchmark.rs b/core/benches/benchmark.rs
index 5318a33c2..5ff69cba1 100644
--- a/core/benches/benchmark.rs
+++ b/core/benches/benchmark.rs
@@ -1,7 +1,7 @@
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use pprof::criterion::{Output, PProfProfiler};
use std::sync::Arc;
-use turso_core::{Database, PlatformIO, IO};
+use turso_core::{Database, PlatformIO};
fn rusqlite_open() -> rusqlite::Connection {
let sqlite_conn = rusqlite::Connection::open("../testing/testing.db").unwrap();
@@ -79,7 +79,6 @@ fn bench_execute_select_rows(criterion: &mut Criterion) {
let mut stmt = limbo_conn
.prepare(format!("SELECT * FROM users LIMIT {}", *i))
.unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
@@ -87,7 +86,7 @@ fn bench_execute_select_rows(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -141,7 +140,6 @@ fn bench_execute_select_1(criterion: &mut Criterion) {
group.bench_function("limbo_execute_select_1", |b| {
let mut stmt = limbo_conn.prepare("SELECT 1").unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
@@ -149,7 +147,7 @@ fn bench_execute_select_1(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -194,7 +192,6 @@ fn bench_execute_select_count(criterion: &mut Criterion) {
group.bench_function("limbo_execute_select_count", |b| {
let mut stmt = limbo_conn.prepare("SELECT count() FROM users").unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
@@ -202,7 +199,7 @@ fn bench_execute_select_count(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
diff --git a/core/benches/json_benchmark.rs b/core/benches/json_benchmark.rs
index 3caa4e3bb..d458d60ea 100644
--- a/core/benches/json_benchmark.rs
+++ b/core/benches/json_benchmark.rs
@@ -4,7 +4,7 @@ use pprof::{
flamegraph::Options,
};
use std::sync::Arc;
-use turso_core::{Database, PlatformIO, IO};
+use turso_core::{Database, PlatformIO};
// Title: JSONB Function Benchmarking
@@ -447,13 +447,12 @@ fn bench(criterion: &mut Criterion) {
group.bench_function("Limbo", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
turso_core::StepResult::Row => {}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -606,13 +605,12 @@ fn bench_sequential_jsonb(criterion: &mut Criterion) {
group.bench_function("Limbo - Sequential", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
turso_core::StepResult::Row => {}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
@@ -899,13 +897,12 @@ fn bench_json_patch(criterion: &mut Criterion) {
group.bench_function("Limbo", |b| {
let mut stmt = limbo_conn.prepare(&query).unwrap();
- let io = io.clone();
b.iter(|| {
loop {
match stmt.step().unwrap() {
turso_core::StepResult::Row => {}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
diff --git a/core/benches/tpc_h_benchmark.rs b/core/benches/tpc_h_benchmark.rs
index b976b5917..16bf857a5 100644
--- a/core/benches/tpc_h_benchmark.rs
+++ b/core/benches/tpc_h_benchmark.rs
@@ -2,7 +2,7 @@ use std::sync::Arc;
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion, SamplingMode};
use pprof::criterion::{Output, PProfProfiler};
-use turso_core::{Database, PlatformIO, IO as _};
+use turso_core::{Database, PlatformIO};
const TPC_H_PATH: &str = "../perf/tpc-h/TPC-H.db";
@@ -97,7 +97,7 @@ fn bench_tpc_h_queries(criterion: &mut Criterion) {
black_box(stmt.row());
}
turso_core::StepResult::IO => {
- let _ = io.run_once();
+ stmt.run_once().unwrap();
}
turso_core::StepResult::Done => {
break;
diff --git a/core/ext/vtab_xconnect.rs b/core/ext/vtab_xconnect.rs
index 2a5993f38..6d29613c3 100644
--- a/core/ext/vtab_xconnect.rs
+++ b/core/ext/vtab_xconnect.rs
@@ -65,7 +65,10 @@ pub unsafe extern "C" fn execute(
return ResultCode::OK;
}
Ok(StepResult::IO) => {
- let _ = conn.pager.io.run_once();
+ let res = stmt.run_once();
+ if res.is_err() {
+ return ResultCode::Error;
+ }
continue;
}
Ok(StepResult::Interrupt) => return ResultCode::Interrupt,
@@ -154,7 +157,6 @@ pub unsafe extern "C" fn stmt_step(stmt: *mut Stmt) -> ResultCode {
tracing::error!("stmt_step: null connection or context");
return ResultCode::Error;
}
- let conn: &Connection = unsafe { &*(stmt._conn as *const Connection) };
let stmt_ctx: &mut Statement = unsafe { &mut *(stmt._ctx as *mut Statement) };
while let Ok(res) = stmt_ctx.step() {
match res {
@@ -162,7 +164,10 @@ pub unsafe extern "C" fn stmt_step(stmt: *mut Stmt) -> ResultCode {
StepResult::Done => return ResultCode::EOF,
StepResult::IO => {
// always handle IO step result internally.
- let _ = conn.pager.io.run_once();
+ let res = stmt_ctx.run_once();
+ if res.is_err() {
+ return ResultCode::Error;
+ }
continue;
}
StepResult::Interrupt => return ResultCode::Interrupt,
diff --git a/core/function.rs b/core/function.rs
index 7827e6307..58cf87b1e 100644
--- a/core/function.rs
+++ b/core/function.rs
@@ -616,7 +616,8 @@ impl Func {
}
}
pub fn resolve_function(name: &str, arg_count: usize) -> Result {
- match name {
+ let normalized_name = crate::util::normalize_ident(name);
+ match normalized_name.as_str() {
"avg" => {
if arg_count != 1 {
crate::bail_parse_error!("wrong number of arguments to function {}()", name)
diff --git a/core/io/unix.rs b/core/io/unix.rs
index 76dfe3c05..235df10d0 100644
--- a/core/io/unix.rs
+++ b/core/io/unix.rs
@@ -18,7 +18,7 @@ use std::{
io::{ErrorKind, Read, Seek, Write},
sync::Arc,
};
-use tracing::{debug, trace};
+use tracing::{debug, instrument, trace, Level};
struct OwnedCallbacks(UnsafeCell);
// We assume we locking on IO level is done by user.
@@ -219,6 +219,7 @@ impl IO for UnixIO {
Ok(unix_file)
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn run_once(&self) -> Result<()> {
if self.callbacks.is_empty() {
return Ok(());
@@ -333,6 +334,7 @@ impl File for UnixFile<'_> {
Ok(())
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn pread(&self, pos: usize, c: Completion) -> Result> {
let file = self.file.borrow();
let result = {
@@ -366,6 +368,7 @@ impl File for UnixFile<'_> {
}
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn pwrite(
&self,
pos: usize,
@@ -401,6 +404,7 @@ impl File for UnixFile<'_> {
}
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn sync(&self, c: Completion) -> Result> {
let file = self.file.borrow();
let result = fs::fsync(file.as_fd());
@@ -415,6 +419,7 @@ impl File for UnixFile<'_> {
}
}
+ #[instrument(err, skip_all, level = Level::INFO)]
fn size(&self) -> Result {
let file = self.file.borrow();
Ok(file.metadata()?.len())
diff --git a/core/lib.rs b/core/lib.rs
index 4067aac15..f5cb80b57 100644
--- a/core/lib.rs
+++ b/core/lib.rs
@@ -43,6 +43,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
use crate::storage::{header_accessor, wal::DummyWAL};
use crate::translate::optimizer::optimize_plan;
+use crate::translate::pragma::TURSO_CDC_DEFAULT_TABLE_NAME;
use crate::util::{OpenMode, OpenOptions};
use crate::vtab::VirtualTable;
use core::str;
@@ -97,7 +98,7 @@ pub type Result = std::result::Result;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TransactionState {
- Write { change_schema: bool },
+ Write { schema_did_change: bool },
Read,
None,
}
@@ -217,7 +218,7 @@ impl Database {
if is_empty == 2 {
// parse schema
let conn = db.connect()?;
- let schema_version = get_schema_version(&conn, &io)?;
+ let schema_version = get_schema_version(&conn)?;
schema.write().schema_version = schema_version;
let rows = conn.query("SELECT * FROM sqlite_schema")?;
let mut schema = schema
@@ -225,7 +226,7 @@ impl Database {
.expect("lock on schema should succeed first try");
let syms = conn.syms.borrow();
if let Err(LimboError::ExtensionError(e)) =
- parse_schema_rows(rows, &mut schema, io, &syms, None)
+ parse_schema_rows(rows, &mut schema, &syms, None)
{
// this means that a vtab exists and we no longer have the module loaded. we print
// a warning to the user to load the module
@@ -278,6 +279,8 @@ impl Database {
cache_size: Cell::new(default_cache_size),
readonly: Cell::new(false),
wal_checkpoint_disabled: Cell::new(false),
+ capture_data_changes: RefCell::new(CaptureDataChangesMode::Off),
+ closed: Cell::new(false),
});
if let Err(e) = conn.register_builtins() {
return Err(LimboError::ExtensionError(e));
@@ -330,6 +333,8 @@ impl Database {
cache_size: Cell::new(default_cache_size),
readonly: Cell::new(false),
wal_checkpoint_disabled: Cell::new(false),
+ capture_data_changes: RefCell::new(CaptureDataChangesMode::Off),
+ closed: Cell::new(false),
});
if let Err(e) = conn.register_builtins() {
@@ -390,7 +395,7 @@ impl Database {
}
}
-fn get_schema_version(conn: &Arc, io: &Arc) -> Result {
+fn get_schema_version(conn: &Arc) -> Result {
let mut rows = conn
.query("PRAGMA schema_version")?
.ok_or(LimboError::InternalError(
@@ -409,7 +414,7 @@ fn get_schema_version(conn: &Arc, io: &Arc) -> Result {
schema_version = Some(row.get::(0)? as u32);
}
StepResult::IO => {
- io.run_once()?;
+ rows.run_once()?;
}
StepResult::Interrupt => {
return Err(LimboError::InternalError(
@@ -434,6 +439,39 @@ fn get_schema_version(conn: &Arc, io: &Arc) -> Result {
}
}
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum CaptureDataChangesMode {
+ Off,
+ RowidOnly { table: String },
+}
+
+impl CaptureDataChangesMode {
+ pub fn parse(value: &str) -> Result {
+ let (mode, table) = value
+ .split_once(",")
+ .unwrap_or((value, TURSO_CDC_DEFAULT_TABLE_NAME));
+ match mode {
+ "off" => Ok(CaptureDataChangesMode::Off),
+ "rowid-only" => Ok(CaptureDataChangesMode::RowidOnly { table: table.to_string() }),
+ _ => Err(LimboError::InvalidArgument(
+ "unexpected pragma value: expected '' or ',' parameter where mode is one of off|rowid-only".to_string(),
+ ))
+ }
+ }
+ pub fn mode_name(&self) -> &str {
+ match self {
+ CaptureDataChangesMode::Off => "off",
+ CaptureDataChangesMode::RowidOnly { .. } => "rowid-only",
+ }
+ }
+ pub fn table(&self) -> Option<&str> {
+ match self {
+ CaptureDataChangesMode::Off => None,
+ CaptureDataChangesMode::RowidOnly { table } => Some(table.as_str()),
+ }
+ }
+}
+
pub struct Connection {
_db: Arc,
pager: Rc,
@@ -450,11 +488,16 @@ pub struct Connection {
cache_size: Cell,
readonly: Cell,
wal_checkpoint_disabled: Cell,
+ capture_data_changes: RefCell<CaptureDataChangesMode>,
+ closed: Cell<bool>,
}
impl Connection {
- #[instrument(skip_all, level = Level::TRACE)]
+ #[instrument(skip_all, level = Level::INFO)]
pub fn prepare(self: &Arc, sql: impl AsRef) -> Result {
+ if self.closed.get() {
+ return Err(LimboError::InternalError("Connection closed".to_string()));
+ }
if sql.as_ref().is_empty() {
return Err(LimboError::InvalidArgument(
"The supplied SQL string contains no statements".to_string(),
@@ -494,8 +537,11 @@ impl Connection {
}
}
- #[instrument(skip_all, level = Level::TRACE)]
+ #[instrument(skip_all, level = Level::INFO)]
pub fn query(self: &Arc, sql: impl AsRef) -> Result