mirror of https://github.com/aljazceru/turso.git (synced 2026-02-18 06:24:56 +01:00)

merge main
63 .github/workflows/napi.yml (vendored)

@@ -19,6 +19,10 @@ defaults:
   run:
     working-directory: bindings/javascript
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
 jobs:
   build:
     timeout-minutes: 20
@@ -27,20 +31,18 @@ jobs:
       matrix:
         settings:
           - host: windows-latest
-            build: |
-              yarn build --target x86_64-pc-windows-msvc
-              yarn test
             target: x86_64-pc-windows-msvc
+            build: yarn workspace @tursodatabase/database napi-build --target x86_64-pc-windows-msvc
           - host: ubuntu-latest
             target: x86_64-unknown-linux-gnu
             docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
-            build: yarn build --target x86_64-unknown-linux-gnu
+            build: yarn workspace @tursodatabase/database napi-build --target x86_64-unknown-linux-gnu
           - host: macos-latest
             target: aarch64-apple-darwin
-            build: yarn build --target aarch64-apple-darwin
+            build: yarn workspace @tursodatabase/database napi-build --target aarch64-apple-darwin
           - host: blacksmith-2vcpu-ubuntu-2404-arm
             target: aarch64-unknown-linux-gnu
-            build: yarn build --target aarch64-unknown-linux-gnu
+            build: yarn workspace @tursodatabase/database napi-build --target aarch64-unknown-linux-gnu
           - host: ubuntu-latest
             target: wasm32-wasip1-threads
             setup: |
@@ -52,7 +54,7 @@ jobs:
              export CMAKE_BUILD_PARALLEL_LEVEL=$(nproc)
              export TARGET_CXXFLAGS="--target=wasm32-wasi-threads --sysroot=$(pwd)/wasi-sdk-25.0-x86_64-linux/share/wasi-sysroot -pthread -mllvm -wasm-enable-sjlj -lsetjmp"
              export TARGET_CFLAGS="$TARGET_CXXFLAGS"
-             yarn build --target wasm32-wasip1-threads
+             yarn workspace @tursodatabase/database-browser build
     name: stable - ${{ matrix.settings.target }} - node@20
     runs-on: ${{ matrix.settings.host }}
     steps:
@@ -88,6 +90,8 @@ jobs:
         shell: bash
       - name: Install dependencies
         run: yarn install
+      - name: Build common
+        run: yarn workspace @tursodatabase/database-common build
      - name: Setup node x86
        uses: actions/setup-node@v4
        if: matrix.settings.target == 'x86_64-pc-windows-msvc'
@@ -110,8 +114,8 @@ jobs:
        with:
          name: bindings-${{ matrix.settings.target }}
          path: |
-            bindings/javascript/${{ env.APP_NAME }}.*.node
-            bindings/javascript/${{ env.APP_NAME }}.*.wasm
+            bindings/javascript/packages/native/${{ env.APP_NAME }}.*.node
+            bindings/javascript/packages/browser/${{ env.APP_NAME }}.*.wasm
          if-no-files-found: error
  test-linux-x64-gnu-binding:
    name: Test bindings on Linux-x64-gnu - node@${{ matrix.node }}
@@ -131,20 +135,21 @@ jobs:
          node-version: ${{ matrix.node }}
      - name: Install dependencies
        run: yarn install
-      - name: Download artifacts
+      - name: Build common
+        run: yarn workspace @tursodatabase/database-common build
+      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          name: bindings-x86_64-unknown-linux-gnu
-          path: bindings/javascript
+          path: bindings/javascript/packages
+          merge-multiple: true
      - name: List packages
        run: ls -R .
-        shell: bash
      - name: Test bindings
-        run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-slim yarn test
+        run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-slim yarn workspace @tursodatabase/database test
  publish:
    name: Publish
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    permissions:
      contents: read
      id-token: write
@@ -156,35 +161,35 @@ jobs:
        uses: useblacksmith/setup-node@v5
        with:
          node-version: 20
-      - name: Install dependencies
-        run: yarn install
-      - name: create npm dirs
-        run: yarn napi create-npm-dirs
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
-          path: bindings/javascript/artifacts
-      - name: Move artifacts
-        run: yarn artifacts
-      - name: List packages
-        run: ls -R ./npm
-        shell: bash
+          path: bindings/javascript/packages
+          merge-multiple: true
+      - name: Install dependencies
+        run: yarn install
+      - name: Install dependencies
+        run: yarn tsc-build
      - name: Publish
        if: "startsWith(github.ref, 'refs/tags/v')"
        run: |
+          npm config set provenance true
          if git log -1 --pretty=%B | grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+$";
          then
            echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
-            make publish-native
-            make publish-browser
+            npm publish --workspaces --access public
          elif git log -1 --pretty=%B | grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+";
          then
            echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
-            make publish-native-next
-            make publish-browser-next
+            npm publish --workspaces --access public --tag next
          else
-            echo "Not a release, skipping publish"
+            echo "git log structure is unexpected, skip publishing"
+            npm publish --workspaces --dry-run
          fi
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
+      - name: Publish (dry-run)
+        if: "!startsWith(github.ref, 'refs/tags/v')"
+        run: |
+          npm publish --workspaces --dry-run
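For context: the publish step above branches on the last commit message. An anchored grep match on `Turso X.Y.Z` publishes to the default dist-tag, the unanchored variant (which also matches pre-release messages such as `Turso 0.1.5-pre.5`) publishes under the `next` tag, and anything else only dry-runs. A small JavaScript sketch of that same decision, purely illustrative and not part of the commit (the workflow itself uses grep, not Node):

```javascript
// Illustrative only: mirrors the grep-based branching in the publish step above.
function npmDistTag(commitMessage) {
  // Anchored match, like grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+$"
  if (/^Turso \d+\.\d+\.\d+$/m.test(commitMessage)) return "latest";
  // Unanchored match, like grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+"
  if (/^Turso \d+\.\d+\.\d+/m.test(commitMessage)) return "next";
  return null; // not a release commit: dry-run only
}

console.log(npmDistTag("Turso 0.1.5"));       // "latest"
console.log(npmDistTag("Turso 0.1.5-pre.5")); // "next"
console.log(npmDistTag("merge main"));        // null
```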
2 .gitignore (vendored)

@@ -42,3 +42,5 @@ simulator.log
 **/*.txt
 profile.json.gz
 simulator-output/
+
+&1
53 Cargo.lock (generated)

@@ -667,7 +667,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
 
 [[package]]
 name = "core_tester"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "anyhow",
  "assert_cmd",
@@ -2126,7 +2126,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_completion"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "mimalloc",
  "turso_ext",
@@ -2134,7 +2134,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_crypto"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "blake3",
  "data-encoding",
@@ -2147,7 +2147,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_csv"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "csv",
  "mimalloc",
@@ -2157,7 +2157,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_ipaddr"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "ipnetwork",
  "mimalloc",
@@ -2166,7 +2166,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_percentile"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "mimalloc",
  "turso_ext",
@@ -2174,7 +2174,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_regexp"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "mimalloc",
  "regex",
@@ -2183,7 +2183,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_sim"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "anyhow",
  "chrono",
@@ -2216,7 +2216,7 @@ dependencies = [
 
 [[package]]
 name = "limbo_sqlite_test_ext"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "cc",
 ]
@@ -2971,7 +2971,7 @@ dependencies = [
 
 [[package]]
 name = "py-turso"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "anyhow",
  "pyo3",
@@ -3666,7 +3666,7 @@ checksum = "d372029cb5195f9ab4e4b9aef550787dce78b124fcaee8d82519925defcd6f0d"
 
 [[package]]
 name = "sql_generation"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "anarchist-readable-name-generator-lib 0.2.0",
  "anyhow",
@@ -4176,7 +4176,7 @@ dependencies = [
 
 [[package]]
 name = "turso"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "rand 0.8.5",
  "rand_chacha 0.3.1",
@@ -4188,7 +4188,7 @@ dependencies = [
 
 [[package]]
 name = "turso-java"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "jni",
  "thiserror 2.0.12",
@@ -4197,7 +4197,7 @@ dependencies = [
 
 [[package]]
 name = "turso_cli"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -4230,7 +4230,7 @@ dependencies = [
 
 [[package]]
 name = "turso_core"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "aegis",
  "aes",
@@ -4289,7 +4289,7 @@ dependencies = [
 
 [[package]]
 name = "turso_dart"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "flutter_rust_bridge",
  "turso_core",
@@ -4297,7 +4297,7 @@ dependencies = [
 
 [[package]]
 name = "turso_ext"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "chrono",
  "getrandom 0.3.2",
@@ -4306,7 +4306,7 @@ dependencies = [
 
 [[package]]
 name = "turso_ext_tests"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "env_logger 0.11.7",
  "lazy_static",
@@ -4317,7 +4317,7 @@ dependencies = [
 
 [[package]]
 name = "turso_macros"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4326,18 +4326,19 @@ dependencies = [
 
 [[package]]
 name = "turso_node"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "napi",
  "napi-build",
  "napi-derive",
  "tracing",
+ "tracing-subscriber",
  "turso_core",
 ]
 
 [[package]]
 name = "turso_parser"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "bitflags 2.9.0",
  "criterion",
@@ -4353,7 +4354,7 @@ dependencies = [
 
 [[package]]
 name = "turso_sqlite3"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "env_logger 0.11.7",
  "libc",
@@ -4366,7 +4367,7 @@ dependencies = [
 
 [[package]]
 name = "turso_sqlite3_parser"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "bitflags 2.9.0",
  "cc",
@@ -4384,7 +4385,7 @@ dependencies = [
 
 [[package]]
 name = "turso_stress"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "anarchist-readable-name-generator-lib 0.1.2",
  "antithesis_sdk",
@@ -4400,7 +4401,7 @@ dependencies = [
 
 [[package]]
 name = "turso_sync_engine"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "base64",
  "bytes",
@@ -4426,7 +4427,7 @@ dependencies = [
 
 [[package]]
 name = "turso_sync_js"
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 dependencies = [
  "genawaiter",
  "http",
34 Cargo.toml

@@ -33,29 +33,29 @@ members = [
 exclude = ["perf/latency/limbo"]
 
 [workspace.package]
-version = "0.1.5-pre.3"
+version = "0.1.5-pre.5"
 authors = ["the Limbo authors"]
 edition = "2021"
 license = "MIT"
 repository = "https://github.com/tursodatabase/turso"
 
 [workspace.dependencies]
-turso = { path = "bindings/rust", version = "0.1.5-pre.3" }
-turso_node = { path = "bindings/javascript", version = "0.1.5-pre.3" }
-limbo_completion = { path = "extensions/completion", version = "0.1.5-pre.3" }
-turso_core = { path = "core", version = "0.1.5-pre.3" }
-turso_sync_engine = { path = "sync/engine", version = "0.1.5-pre.3" }
-limbo_crypto = { path = "extensions/crypto", version = "0.1.5-pre.3" }
-limbo_csv = { path = "extensions/csv", version = "0.1.5-pre.3" }
-turso_ext = { path = "extensions/core", version = "0.1.5-pre.3" }
-turso_ext_tests = { path = "extensions/tests", version = "0.1.5-pre.3" }
-limbo_ipaddr = { path = "extensions/ipaddr", version = "0.1.5-pre.3" }
-turso_macros = { path = "macros", version = "0.1.5-pre.3" }
-limbo_percentile = { path = "extensions/percentile", version = "0.1.5-pre.3" }
-limbo_regexp = { path = "extensions/regexp", version = "0.1.5-pre.3" }
-turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.1.5-pre.3" }
-limbo_uuid = { path = "extensions/uuid", version = "0.1.5-pre.3" }
-turso_parser = { path = "parser", version = "0.1.5-pre.3" }
+turso = { path = "bindings/rust", version = "0.1.5-pre.5" }
+turso_node = { path = "bindings/javascript", version = "0.1.5-pre.5" }
+limbo_completion = { path = "extensions/completion", version = "0.1.5-pre.5" }
+turso_core = { path = "core", version = "0.1.5-pre.5" }
+turso_sync_engine = { path = "sync/engine", version = "0.1.5-pre.5" }
+limbo_crypto = { path = "extensions/crypto", version = "0.1.5-pre.5" }
+limbo_csv = { path = "extensions/csv", version = "0.1.5-pre.5" }
+turso_ext = { path = "extensions/core", version = "0.1.5-pre.5" }
+turso_ext_tests = { path = "extensions/tests", version = "0.1.5-pre.5" }
+limbo_ipaddr = { path = "extensions/ipaddr", version = "0.1.5-pre.5" }
+turso_macros = { path = "macros", version = "0.1.5-pre.5" }
+limbo_percentile = { path = "extensions/percentile", version = "0.1.5-pre.5" }
+limbo_regexp = { path = "extensions/regexp", version = "0.1.5-pre.5" }
+turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.1.5-pre.5" }
+limbo_uuid = { path = "extensions/uuid", version = "0.1.5-pre.5" }
+turso_parser = { path = "parser", version = "0.1.5-pre.5" }
 sql_generation = { path = "sql_generation" }
 strum = { version = "0.26", features = ["derive"] }
 strum_macros = "0.26"
2 bindings/javascript/.gitignore (vendored)

@@ -197,4 +197,4 @@ Cargo.lock
 *.node
 *.wasm
 
-package.native.json
+npm
@@ -11,3 +11,5 @@ yarn.lock
 .yarn
 __test__
 renovate.json
+examples
+perf
@@ -15,9 +15,11 @@ turso_core = { workspace = true }
 napi = { version = "3.1.3", default-features = false, features = ["napi6"] }
 napi-derive = { version = "3.1.1", default-features = true }
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
 tracing.workspace = true
 
 [features]
 encryption = ["turso_core/encryption"]
+browser = []
 
 [build-dependencies]
 napi-build = "2.2.3"
@@ -1 +0,0 @@
-export * from '@tursodatabase/database-wasm32-wasi'
@@ -1,398 +0,0 @@
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */

import { createRequire } from 'node:module'
const require = createRequire(import.meta.url)
const __dirname = new URL('.', import.meta.url).pathname

const { readFileSync } = require('node:fs')
let nativeBinding = null
const loadErrors = []

const isMusl = () => {
  let musl = false
  if (process.platform === 'linux') {
    musl = isMuslFromFilesystem()
    if (musl === null) {
      musl = isMuslFromReport()
    }
    if (musl === null) {
      musl = isMuslFromChildProcess()
    }
  }
  return musl
}

const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')

const isMuslFromFilesystem = () => {
  try {
    return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
  } catch {
    return null
  }
}

const isMuslFromReport = () => {
  let report = null
  if (typeof process.report?.getReport === 'function') {
    process.report.excludeNetwork = true
    report = process.report.getReport()
  }
  if (!report) {
    return null
  }
  if (report.header && report.header.glibcVersionRuntime) {
    return false
  }
  if (Array.isArray(report.sharedObjects)) {
    if (report.sharedObjects.some(isFileMusl)) {
      return true
    }
  }
  return false
}

const isMuslFromChildProcess = () => {
  try {
    return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
  } catch (e) {
    // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
    return false
  }
}

function requireNative() {
  if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
    try {
      nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
    } catch (err) {
      loadErrors.push(err)
    }
  } else if (process.platform === 'android') {
    if (process.arch === 'arm64') {
      try {
        return require('./turso.android-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-android-arm64')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./turso.android-arm-eabi.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-android-arm-eabi')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
    }
  } else if (process.platform === 'win32') {
    if (process.arch === 'x64') {
      try {
        return require('./turso.win32-x64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-win32-x64-msvc')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'ia32') {
      try {
        return require('./turso.win32-ia32-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-win32-ia32-msvc')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./turso.win32-arm64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-win32-arm64-msvc')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
    }
  } else if (process.platform === 'darwin') {
    try {
      return require('./turso.darwin-universal.node')
    } catch (e) {
      loadErrors.push(e)
    }
    try {
      return require('@tursodatabase/database-darwin-universal')
    } catch (e) {
      loadErrors.push(e)
    }
    if (process.arch === 'x64') {
      try {
        return require('./turso.darwin-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-darwin-x64')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./turso.darwin-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-darwin-arm64')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
    }
  } else if (process.platform === 'freebsd') {
    if (process.arch === 'x64') {
      try {
        return require('./turso.freebsd-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-freebsd-x64')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./turso.freebsd-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-freebsd-arm64')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
    }
  } else if (process.platform === 'linux') {
    if (process.arch === 'x64') {
      if (isMusl()) {
        try {
          return require('./turso.linux-x64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-x64-musl')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-x64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-x64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm64') {
      if (isMusl()) {
        try {
          return require('./turso.linux-arm64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-arm64-musl')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-arm64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-arm64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm') {
      if (isMusl()) {
        try {
          return require('./turso.linux-arm-musleabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-arm-musleabihf')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-arm-gnueabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-arm-gnueabihf')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'riscv64') {
      if (isMusl()) {
        try {
          return require('./turso.linux-riscv64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-riscv64-musl')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-riscv64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/database-linux-riscv64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'ppc64') {
      try {
        return require('./turso.linux-ppc64-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-linux-ppc64-gnu')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 's390x') {
      try {
        return require('./turso.linux-s390x-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-linux-s390x-gnu')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
    }
  } else if (process.platform === 'openharmony') {
    if (process.arch === 'arm64') {
      try {
        return require('./turso.linux-arm64-ohos.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-linux-arm64-ohos')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'x64') {
      try {
        return require('./turso.linux-x64-ohos.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-linux-x64-ohos')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./turso.linux-arm-ohos.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/database-linux-arm-ohos')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`))
    }
  } else {
    loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
  }
}

nativeBinding = requireNative()

if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
  try {
    nativeBinding = require('./turso.wasi.cjs')
  } catch (err) {
    if (process.env.NAPI_RS_FORCE_WASI) {
      loadErrors.push(err)
    }
  }
  if (!nativeBinding) {
    try {
      nativeBinding = require('@tursodatabase/database-wasm32-wasi')
    } catch (err) {
      if (process.env.NAPI_RS_FORCE_WASI) {
        loadErrors.push(err)
      }
    }
  }
}

if (!nativeBinding) {
  if (loadErrors.length > 0) {
    throw new Error(
      `Cannot find native binding. ` +
        `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
        'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
      { cause: loadErrors }
    )
  }
  throw new Error(`Failed to load native binding`)
}

const { Database, Statement } = nativeBinding
export { Database }
export { Statement }
4007 bindings/javascript/package-lock.json (generated)

File diff suppressed because it is too large
@@ -1,59 +0,0 @@
{
  "name": "@tursodatabase/database-browser",
  "version": "0.1.5-pre.2",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "description": "The Turso database library specifically for browser/web environment",
  "module": "./dist/promise.js",
  "main": "./dist/promise.js",
  "type": "module",
  "exports": {
    ".": "./dist/promise.js",
    "./compat": "./dist/compat.js"
  },
  "files": [
    "browser.js",
    "index.js",
    "index.d.ts",
    "dist/**"
  ],
  "types": "index.d.ts",
  "napi": {
    "binaryName": "turso",
    "targets": [
      "wasm32-wasip1-threads"
    ]
  },
  "license": "MIT",
  "devDependencies": {
    "@napi-rs/cli": "^3.0.4",
    "@napi-rs/wasm-runtime": "^1.0.1",
    "ava": "^6.0.1",
    "better-sqlite3": "^11.9.1",
    "typescript": "^5.9.2"
  },
  "ava": {
    "timeout": "3m"
  },
  "engines": {
    "node": ">= 10"
  },
  "scripts": {
    "artifacts": "napi artifacts",
    "build": "npm exec tsc && napi build --platform --release --esm",
    "build:debug": "npm exec tsc && napi build --platform",
    "prepublishOnly": "npm exec tsc && napi prepublish -t npm --skip-optional-publish",
    "test": "true",
    "universal": "napi universalize",
    "version": "napi version"
  },
  "packageManager": "yarn@4.9.2",
  "imports": {
    "#entry-point": {
      "types": "./index.d.ts",
      "browser": "./browser.js"
    }
  }
}
@@ -1,64 +1,13 @@
 {
-  "name": "@tursodatabase/database",
-  "version": "0.1.5-pre.3",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/tursodatabase/turso"
-  },
-  "description": "The Turso database library",
-  "module": "./dist/promise.js",
-  "main": "./dist/promise.js",
-  "type": "module",
-  "exports": {
-    ".": "./dist/promise.js",
-    "./compat": "./dist/compat.js"
-  },
-  "files": [
-    "browser.js",
-    "index.js",
-    "index.d.ts",
-    "dist/**"
-  ],
-  "types": "index.d.ts",
-  "napi": {
-    "binaryName": "turso",
-    "targets": [
-      "x86_64-unknown-linux-gnu",
-      "x86_64-pc-windows-msvc",
-      "universal-apple-darwin",
-      "aarch64-unknown-linux-gnu",
-      "wasm32-wasip1-threads"
-    ]
-  },
-  "license": "MIT",
-  "devDependencies": {
-    "@napi-rs/cli": "^3.0.4",
-    "@napi-rs/wasm-runtime": "^1.0.1",
-    "ava": "^6.0.1",
-    "better-sqlite3": "^11.9.1",
-    "typescript": "^5.9.2"
-  },
-  "ava": {
-    "timeout": "3m"
-  },
-  "engines": {
-    "node": ">= 10"
-  },
   "scripts": {
-    "artifacts": "napi artifacts",
-    "build": "npm exec tsc && napi build --platform --release --esm",
-    "build:debug": "npm exec tsc && napi build --platform",
-    "prepublishOnly": "npm exec tsc && napi prepublish -t npm",
-    "test": "true",
-    "universal": "napi universalize",
-    "version": "napi version"
+    "build": "npm run build --workspaces",
+    "tsc-build": "npm run tsc-build --workspaces",
+    "test": "npm run test --workspaces"
   },
-  "packageManager": "yarn@4.9.2",
-  "imports": {
-    "#entry-point": {
-      "types": "./index.d.ts",
-      "browser": "./browser.js",
-      "node": "./index.js"
-    }
-  }
-}
+  "workspaces": [
+    "packages/common",
+    "packages/native",
+    "packages/browser"
+  ],
+  "version": "0.1.5-pre.5"
+}
124 bindings/javascript/packages/browser/README.md (Normal file)

@@ -0,0 +1,124 @@
<p align="center">
  <h1 align="center">Turso Database for JavaScript in Browser</h1>
</p>

<p align="center">
  <a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
  <a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
  <a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package is the Turso embedded database library for JavaScript in Browser.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Features

- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
- **In-process**: No network overhead, runs directly in your Node.js process
- **TypeScript support**: Full TypeScript definitions included

## Installation

```bash
npm install @tursodatabase/database-browser
```

## Getting Started

### In-Memory Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create an in-memory database
const db = await connect(':memory:');

// Create a table
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

// Insert data
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
await insert.run('Alice', 'alice@example.com');
await insert.run('Bob', 'bob@example.com');

// Query data
const users = await db.prepare('SELECT * FROM users').all();
console.log(users);
// Output: [
//   { id: 1, name: 'Alice', email: 'alice@example.com' },
//   { id: 2, name: 'Bob', email: 'bob@example.com' }
// ]
```

### File-Based Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create or open a database file
const db = await connect('my-database.db');

// Create a table
await db.exec(`
  CREATE TABLE IF NOT EXISTS posts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    content TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )
`);

// Insert a post
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
const result = await insertPost.run('Hello World', 'This is my first blog post!');

console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
```

### Transactions

```javascript
import { connect } from '@tursodatabase/database-browser';

const db = await connect('transactions.db');

// Using transactions for atomic operations
const transaction = db.transaction(async (users) => {
  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  for (const user of users) {
    await insert.run(user.name, user.email);
  }
});

// Execute transaction
await transaction([
  { name: 'Alice', email: 'alice@example.com' },
  { name: 'Bob', email: 'bob@example.com' }
]);
```

## API Reference

For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).

## Related Packages

* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud.

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)
@@ -1,7 +1,7 @@
 import {
   createOnMessage as __wasmCreateOnMessageForFsProxy,
   getDefaultContext as __emnapiGetDefaultContext,
-  instantiateNapiModuleSync as __emnapiInstantiateNapiModuleSync,
+  instantiateNapiModule as __emnapiInstantiateNapiModule,
   WASI as __WASI,
 } from '@napi-rs/wasm-runtime'
 
@@ -23,19 +23,25 @@ const __sharedMemory = new WebAssembly.Memory({
 
 const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())
 
+export let MainWorker = null;
+
+function panic(name) {
+  throw new Error(`method ${name} must be invoked only from the main thread`);
+}
+
 const {
   instance: __napiInstance,
   module: __wasiModule,
   napiModule: __napiModule,
-} = __emnapiInstantiateNapiModuleSync(__wasmFile, {
+} = await __emnapiInstantiateNapiModule(__wasmFile, {
   context: __emnapiContext,
-  asyncWorkPoolSize: 4,
+  asyncWorkPoolSize: 1,
   wasi: __wasi,
   onCreateWorker() {
-    const worker = new Worker(new URL('./wasi-worker-browser.mjs', import.meta.url), {
+    const worker = new Worker(new URL('./worker.mjs', import.meta.url), {
       type: 'module',
     })
+
+    MainWorker = worker;
     return worker
   },
   overwriteImports(importObject) {
@@ -44,6 +50,13 @@ const {
       ...importObject.napi,
       ...importObject.emnapi,
       memory: __sharedMemory,
+      is_web_worker: () => false,
+      lookup_file: () => panic("lookup_file"),
+      read: () => panic("read"),
+      write: () => panic("write"),
+      sync: () => panic("sync"),
+      truncate: () => panic("truncate"),
+      size: () => panic("size"),
     }
     return importObject
   },
@@ -57,4 +70,8 @@ const {
 })
 export default __napiModule.exports
 export const Database = __napiModule.exports.Database
+export const Opfs = __napiModule.exports.Opfs
+export const OpfsFile = __napiModule.exports.OpfsFile
 export const Statement = __napiModule.exports.Statement
+export const connect = __napiModule.exports.connect
+export const initThreadPool = __napiModule.exports.initThreadPool
45 bindings/javascript/packages/browser/package.json (Normal file)

@@ -0,0 +1,45 @@
{
  "name": "@tursodatabase/database-browser",
  "version": "0.1.5-pre.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/promise.js",
  "packageManager": "yarn@4.9.2",
  "files": [
    "index.js",
    "worker.mjs",
    "turso.wasm32-wasi.wasm",
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "@napi-rs/cli": "^3.1.5",
    "@vitest/browser": "^3.2.4",
    "playwright": "^1.55.0",
    "typescript": "^5.9.2",
    "vitest": "^3.2.4"
  },
  "scripts": {
    "napi-build": "napi build --features browser --release --platform --target wasm32-wasip1-threads --no-js --manifest-path ../../Cargo.toml --output-dir . && rm index.d.ts turso.wasi* wasi* browser.js",
    "tsc-build": "npm exec tsc",
    "build": "npm run napi-build && npm run tsc-build",
    "test": "CI=1 vitest --browser=chromium --run && CI=1 vitest --browser=firefox --run"
  },
  "napi": {
    "binaryName": "turso",
    "targets": [
      "wasm32-wasip1-threads"
    ]
  },
  "imports": {
    "#index": "./index.js"
  },
  "dependencies": {
    "@napi-rs/wasm-runtime": "^1.0.3",
    "@tursodatabase/database-common": "^0.1.5-pre.5"
  }
}
95 bindings/javascript/packages/browser/promise.test.ts (Normal file)

@@ -0,0 +1,95 @@
import { expect, test, afterEach } from 'vitest'
import { connect } from './promise.js'

test('in-memory db', async () => {
  const db = await connect(":memory:");
  await db.exec("CREATE TABLE t(x)");
  await db.exec("INSERT INTO t VALUES (1), (2), (3)");
  const stmt = db.prepare("SELECT * FROM t WHERE x % 2 = ?");
  const rows = await stmt.all([1]);
  expect(rows).toEqual([{ x: 1 }, { x: 3 }]);
})

test('on-disk db', async () => {
  const path = `test-${(Math.random() * 10000) | 0}.db`;
  const db1 = await connect(path);
  await db1.exec("CREATE TABLE t(x)");
  await db1.exec("INSERT INTO t VALUES (1), (2), (3)");
  const stmt1 = db1.prepare("SELECT * FROM t WHERE x % 2 = ?");
  expect(stmt1.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
  const rows1 = await stmt1.all([1]);
  expect(rows1).toEqual([{ x: 1 }, { x: 3 }]);
  await db1.close();
  stmt1.close();

  const db2 = await connect(path);
  const stmt2 = db2.prepare("SELECT * FROM t WHERE x % 2 = ?");
  expect(stmt2.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
  const rows2 = await stmt2.all([1]);
  expect(rows2).toEqual([{ x: 1 }, { x: 3 }]);
  db2.close();
})

test('attach', async () => {
  const path1 = `test-${(Math.random() * 10000) | 0}.db`;
  const path2 = `test-${(Math.random() * 10000) | 0}.db`;
  const db1 = await connect(path1);
  await db1.exec("CREATE TABLE t(x)");
  await db1.exec("INSERT INTO t VALUES (1), (2), (3)");
  const db2 = await connect(path2);
  await db2.exec("CREATE TABLE q(x)");
  await db2.exec("INSERT INTO q VALUES (4), (5), (6)");

  await db1.exec(`ATTACH '${path2}' as secondary`);

  const stmt = db1.prepare("SELECT * FROM t UNION ALL SELECT * FROM secondary.q");
  expect(stmt.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
  const rows = await stmt.all([1]);
  expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }, { x: 4 }, { x: 5 }, { x: 6 }]);
})

test('blobs', async () => {
  const db = await connect(":memory:");
  const rows = await db.prepare("SELECT x'1020' as x").all();
  expect(rows).toEqual([{ x: new Uint8Array([16, 32]) }])
})


test('example-1', async () => {
  const db = await connect(':memory:');
  await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  await insert.run('Alice', 'alice@example.com');
  await insert.run('Bob', 'bob@example.com');

  const users = await db.prepare('SELECT * FROM users').all();
  expect(users).toEqual([
    { id: 1, name: 'Alice', email: 'alice@example.com' },
    { id: 2, name: 'Bob', email: 'bob@example.com' }
  ]);
})

test('example-2', async () => {
  const db = await connect(':memory:');
  await db.exec('CREATE TABLE users (name, email)');
  // Using transactions for atomic operations
  const transaction = db.transaction(async (users) => {
    const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
    for (const user of users) {
      await insert.run(user.name, user.email);
    }
  });

  // Execute transaction
  await transaction([
    { name: 'Alice', email: 'alice@example.com' },
    { name: 'Bob', email: 'bob@example.com' }
  ]);

  const rows = await db.prepare('SELECT * FROM users').all();
  expect(rows).toEqual([
    { name: 'Alice', email: 'alice@example.com' },
    { name: 'Bob', email: 'bob@example.com' }
  ]);
})
78 bindings/javascript/packages/browser/promise.ts (Normal file)

@@ -0,0 +1,78 @@
import { DatabasePromise, NativeDatabase, DatabaseOpts, SqliteError } from "@tursodatabase/database-common"
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";

let workerRequestId = 0;
class Database extends DatabasePromise {
  files: string[];
  constructor(db: NativeDatabase, files: string[], opts: DatabaseOpts = {}) {
    super(db, opts)
    this.files = files;
  }
  async close() {
    let currentId = workerRequestId;
    workerRequestId += this.files.length;

    let tasks = [];
    for (const file of this.files) {
      (MainWorker as any).postMessage({ __turso__: "unregister", path: file, id: currentId });
      tasks.push(waitFor(currentId));
      currentId += 1;
    }
    await Promise.all(tasks);
    this.db.close();
  }
}

function waitFor(id: number): Promise<any> {
  let waitResolve, waitReject;
  const callback = msg => {
    if (msg.data.id == id) {
      if (msg.data.error != null) {
        waitReject(msg.data.error)
      } else {
        waitResolve()
      }
      cleanup();
    }
  };
  const cleanup = () => (MainWorker as any).removeEventListener("message", callback);

  (MainWorker as any).addEventListener("message", callback);
  const result = new Promise((resolve, reject) => {
    waitResolve = resolve;
    waitReject = reject;
  });
  return result;
}

/**
 * Creates a new database connection asynchronously.
 *
 * @param {string} path - Path to the database file.
 * @param {Object} opts - Options for database behavior.
 * @returns {Promise<Database>} - A promise that resolves to a Database instance.
 */
async function connect(path: string, opts: DatabaseOpts = {}): Promise<Database> {
  if (path == ":memory:") {
    const db = await nativeConnect(path, { tracing: opts.tracing });
    return new Database(db, [], opts);
  }
  await initThreadPool();
  if (MainWorker == null) {
    throw new Error("panic: MainWorker is not set");
  }

  let currentId = workerRequestId;
  workerRequestId += 2;

  let dbHandlePromise = waitFor(currentId);
  let walHandlePromise = waitFor(currentId + 1);
  (MainWorker as any).postMessage({ __turso__: "register", path: `${path}`, id: currentId });
  (MainWorker as any).postMessage({ __turso__: "register", path: `${path}-wal`, id: currentId + 1 });
  await Promise.all([dbHandlePromise, walHandlePromise]);
  const db = await nativeConnect(path, { tracing: opts.tracing });
  const files = [path, `${path}-wal`];
  return new Database(db, files, opts);
}

export { connect, Database, SqliteError }
21 bindings/javascript/packages/browser/tsconfig.json (Normal file)

@@ -0,0 +1,21 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "nodenext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020"
    ],
    "paths": {
      "#index": [
        "./index.js"
      ]
    }
  },
  "include": [
    "*"
  ]
}
23 bindings/javascript/packages/browser/vitest.config.ts (Normal file)

@@ -0,0 +1,23 @@
import { defineConfig } from 'vitest/config'

export default defineConfig({
  define: {
    'process.env.NODE_DEBUG_NATIVE': 'false',
  },
  server: {
    headers: {
      "Cross-Origin-Embedder-Policy": "require-corp",
      "Cross-Origin-Opener-Policy": "same-origin"
    },
  },
  test: {
    browser: {
      enabled: true,
      provider: 'playwright',
      instances: [
        { browser: 'chromium' },
        { browser: 'firefox' }
      ],
    },
  },
})
160 bindings/javascript/packages/browser/worker.mjs (Normal file)

@@ -0,0 +1,160 @@
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'

var fileByPath = new Map();
var fileByHandle = new Map();
let fileHandles = 0;
var memory = null;

function getUint8ArrayFromWasm(ptr, len) {
  ptr = ptr >>> 0;
  return new Uint8Array(memory.buffer).subarray(ptr, ptr + len);
}


async function registerFile(path) {
  if (fileByPath.has(path)) {
    return;
  }
  const opfsRoot = await navigator.storage.getDirectory();
  const opfsHandle = await opfsRoot.getFileHandle(path, { create: true });
  const opfsSync = await opfsHandle.createSyncAccessHandle();
  fileHandles += 1;
  fileByPath.set(path, { handle: fileHandles, sync: opfsSync });
  fileByHandle.set(fileHandles, opfsSync);
}

async function unregisterFile(path) {
  const file = fileByPath.get(path);
  if (file == null) {
    return;
  }
  fileByPath.delete(path);
  fileByHandle.delete(file.handle);
  file.sync.close();
}

function lookup_file(pathPtr, pathLen) {
  try {
    const buffer = getUint8ArrayFromWasm(pathPtr, pathLen);
    const notShared = new Uint8Array(buffer.length);
    notShared.set(buffer);
    const decoder = new TextDecoder('utf-8');
    const path = decoder.decode(notShared);
    const file = fileByPath.get(path);
    if (file == null) {
      return -404;
    }
    return file.handle;
  } catch (e) {
    console.error('lookupFile', pathPtr, pathLen, e);
    return -1;
  }
}
function read(handle, bufferPtr, bufferLen, offset) {
  try {
    const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
    const file = fileByHandle.get(Number(handle));
    const result = file.read(buffer, { at: Number(offset) });
    return result;
  } catch (e) {
    console.error('read', handle, bufferPtr, bufferLen, offset, e);
    return -1;
  }
}
function write(handle, bufferPtr, bufferLen, offset) {
  try {
    const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
    const file = fileByHandle.get(Number(handle));
    const result = file.write(buffer, { at: Number(offset) });
    return result;
  } catch (e) {
    console.error('write', handle, bufferPtr, bufferLen, offset, e);
    return -1;
  }
}
function sync(handle) {
  try {
    const file = fileByHandle.get(Number(handle));
    file.flush();
    return 0;
  } catch (e) {
    console.error('sync', handle, e);
    return -1;
  }
}
function truncate(handle, size) {
  try {
    const file = fileByHandle.get(Number(handle));
    const result = file.truncate(size);
    return result;
  } catch (e) {
    console.error('truncate', handle, size, e);
    return -1;
  }
}
function size(handle) {
  try {
    const file = fileByHandle.get(Number(handle));
    const size = file.getSize()
    return size;
  } catch (e) {
    console.error('size', handle, e);
    return -1;
  }
}

const handler = new MessageHandler({
  onLoad({ wasmModule, wasmMemory }) {
    memory = wasmMemory;
    const wasi = new WASI({
      print: function () {
        // eslint-disable-next-line no-console
        console.log.apply(console, arguments)
      },
      printErr: function () {
        // eslint-disable-next-line no-console
        console.error.apply(console, arguments)
      },
    })
    return instantiateNapiModuleSync(wasmModule, {
      childThread: true,
      wasi,
      overwriteImports(importObject) {
        importObject.env = {
          ...importObject.env,
          ...importObject.napi,
          ...importObject.emnapi,
          memory: wasmMemory,
          is_web_worker: () => true,
          lookup_file: lookup_file,
          read: read,
          write: write,
          sync: sync,
          truncate: truncate,
          size: size,
        }
      },
    })
  },
})

globalThis.onmessage = async function (e) {
  if (e.data.__turso__ == 'register') {
    try {
      await registerFile(e.data.path)
      self.postMessage({ id: e.data.id })
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
    return;
  } else if (e.data.__turso__ == 'unregister') {
    try {
      await unregisterFile(e.data.path)
      self.postMessage({ id: e.data.id })
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
    return;
  }
  handler.handle(e)
}
8 bindings/javascript/packages/common/README.md (Normal file)

@@ -0,0 +1,8 @@
## About

This package is the Turso embedded database common JS library which is shared between final builds for Node and Browser.

Do not use this package directly - instead you must use `@tursodatabase/database` or `@tursodatabase/database-browser`.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.
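For orientation, a minimal consumption sketch (not part of the commit) of how the package split is meant to be used: the public packages re-export the shared classes from `@tursodatabase/database-common`, so applications only ever import the platform entry point. Only API calls that appear in the READMEs and tests above are used here:

```javascript
// Import the public entry point, never @tursodatabase/database-common directly.
import { connect } from '@tursodatabase/database';            // Node builds
// import { connect } from '@tursodatabase/database-browser'; // browser builds (OPFS-backed)

const db = await connect(':memory:');
await db.exec('CREATE TABLE t (x INTEGER)');
await db.prepare('INSERT INTO t VALUES (?)').run(42);
console.log(await db.prepare('SELECT x FROM t').all()); // [ { x: 42 } ]
```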
@@ -1,12 +1,6 @@
-import { Database as NativeDB, Statement as NativeStatement } from "#entry-point";
 import { bindParams } from "./bind.js";
-
 import { SqliteError } from "./sqlite-error.js";
-
-// Step result constants
-const STEP_ROW = 1;
-const STEP_DONE = 2;
-const STEP_IO = 3;
+import { NativeDatabase, NativeStatement, STEP_IO, STEP_ROW, STEP_DONE } from "./types.js";
 
 const convertibleErrorTypes = { TypeError };
 const CONVERTIBLE_ERROR_PREFIX = "[TURSO_CONVERT_TYPE]";
@@ -35,7 +29,7 @@ function createErrorByName(name, message) {
  * Database represents a connection that can prepare and execute SQL statements.
  */
 class Database {
-  db: NativeDB;
+  db: NativeDatabase;
   memory: boolean;
   open: boolean;
   private _inTransaction: boolean = false;
@@ -50,15 +44,14 @@ class Database {
    * @param {boolean} [opts.fileMustExist=false] - If true, throws if database file does not exist.
    * @param {number} [opts.timeout=0] - Timeout duration in milliseconds for database operations. Defaults to 0 (no timeout).
    */
-  constructor(path: string, opts: any = {}) {
+  constructor(db: NativeDatabase, opts: any = {}) {
     opts.readonly = opts.readonly === undefined ? false : opts.readonly;
     opts.fileMustExist =
       opts.fileMustExist === undefined ? false : opts.fileMustExist;
     opts.timeout = opts.timeout === undefined ? 0 : opts.timeout;
 
-    this.db = new NativeDB(path);
+    this.db = db;
     this.memory = this.db.memory;
-    const db = this.db;
 
     Object.defineProperties(this, {
       inTransaction: {
@@ -66,7 +59,7 @@ class Database {
       },
       name: {
         get() {
-          return path;
+          return db.path;
         },
       },
       readonly: {
@@ -199,7 +192,7 @@ class Database {
     }
 
     try {
-      this.db.batch(sql);
+      this.db.batchSync(sql);
     } catch (err) {
       throw convertError(err);
     }
@@ -301,7 +294,7 @@ class Statement {
     this.stmt.reset();
     bindParams(this.stmt, bindParameters);
     for (; ;) {
-      const stepResult = this.stmt.step();
+      const stepResult = this.stmt.stepSync();
       if (stepResult === STEP_IO) {
        this.db.db.ioLoopSync();
        continue;
@@ -330,7 +323,7 @@ class Statement {
     this.stmt.reset();
     bindParams(this.stmt, bindParameters);
     for (; ;) {
-      const stepResult = this.stmt.step();
+      const stepResult = this.stmt.stepSync();
       if (stepResult === STEP_IO) {
        this.db.db.ioLoopSync();
        continue;
@@ -354,7 +347,7 @@ class Statement {
     bindParams(this.stmt, bindParameters);
 
     while (true) {
-      const stepResult = this.stmt.step();
+      const stepResult = this.stmt.stepSync();
       if (stepResult === STEP_IO) {
        this.db.db.ioLoopSync();
        continue;
@@ -378,7 +371,7 @@ class Statement {
     bindParams(this.stmt, bindParameters);
     const rows: any[] = [];
     for (; ;) {
-      const stepResult = this.stmt.step();
+      const stepResult = this.stmt.stepSync();
       if (stepResult === STEP_IO) {
        this.db.db.ioLoopSync();
        continue;
@@ -417,4 +410,4 @@ class Statement {
   }
 }
 
-export { Database, SqliteError }
+export { Database, Statement }
6
bindings/javascript/packages/common/index.ts
Normal file
@@ -0,0 +1,6 @@
import { NativeDatabase, NativeStatement, DatabaseOpts } from "./types.js";
import { Database as DatabaseCompat, Statement as StatementCompat } from "./compat.js";
import { Database as DatabasePromise, Statement as StatementPromise } from "./promise.js";
import { SqliteError } from "./sqlite-error.js";

export { DatabaseCompat, StatementCompat, DatabasePromise, StatementPromise, NativeDatabase, NativeStatement, SqliteError, DatabaseOpts }
25
bindings/javascript/packages/common/package.json
Normal file
@@ -0,0 +1,25 @@
{
  "name": "@tursodatabase/database-common",
  "version": "0.1.5-pre.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "packageManager": "yarn@4.9.2",
  "files": [
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "typescript": "^5.9.2"
  },
  "scripts": {
    "tsc-build": "npm exec tsc",
    "build": "npm run tsc-build",
    "test": "echo 'no tests'"
  }
}
@@ -1,12 +1,6 @@
import { Database as NativeDB, Statement as NativeStatement } from "#entry-point";
import { bindParams } from "./bind.js";

import { SqliteError } from "./sqlite-error.js";

// Step result constants
const STEP_ROW = 1;
const STEP_DONE = 2;
const STEP_IO = 3;
import { NativeDatabase, NativeStatement, STEP_IO, STEP_ROW, STEP_DONE, DatabaseOpts } from "./types.js";

const convertibleErrorTypes = { TypeError };
const CONVERTIBLE_ERROR_PREFIX = "[TURSO_CONVERT_TYPE]";
@@ -35,7 +29,7 @@ function createErrorByName(name, message) {
* Database represents a connection that can prepare and execute SQL statements.
*/
class Database {
db: NativeDB;
db: NativeDatabase;
memory: boolean;
open: boolean;
private _inTransaction: boolean = false;
@@ -49,19 +43,18 @@ class Database {
* @param {boolean} [opts.fileMustExist=false] - If true, throws if database file does not exist.
* @param {number} [opts.timeout=0] - Timeout duration in milliseconds for database operations. Defaults to 0 (no timeout).
*/
constructor(path: string, opts: any = {}) {
constructor(db: NativeDatabase, opts: DatabaseOpts = {}) {
opts.readonly = opts.readonly === undefined ? false : opts.readonly;
opts.fileMustExist =
opts.fileMustExist === undefined ? false : opts.fileMustExist;
opts.timeout = opts.timeout === undefined ? 0 : opts.timeout;

const db = new NativeDB(path);
this.initialize(db, opts.path, opts.readonly);
this.initialize(db, opts.name, opts.readonly);
}
static create() {
return Object.create(this.prototype);
}
initialize(db: NativeDB, name, readonly) {
initialize(db: NativeDatabase, name, readonly) {
this.db = db;
this.memory = db.memory;
Object.defineProperties(this, {
@@ -112,22 +105,22 @@ class Database {
*
* @param {function} fn - The function to wrap in a transaction.
*/
transaction(fn) {
transaction(fn: (...any) => Promise<any>) {
if (typeof fn !== "function")
throw new TypeError("Expected first argument to be a function");

const db = this;
const wrapTxn = (mode) => {
return (...bindParameters) => {
db.exec("BEGIN " + mode);
return async (...bindParameters) => {
await db.exec("BEGIN " + mode);
db._inTransaction = true;
try {
const result = fn(...bindParameters);
db.exec("COMMIT");
const result = await fn(...bindParameters);
await db.exec("COMMIT");
db._inTransaction = false;
return result;
} catch (err) {
db.exec("ROLLBACK");
await db.exec("ROLLBACK");
db._inTransaction = false;
throw err;
}
@@ -147,7 +140,7 @@ class Database {
return properties.default.value;
}

pragma(source, options) {
async pragma(source, options) {
if (options == null) options = {};

if (typeof source !== "string")
@@ -158,8 +151,8 @@ class Database {

const pragma = `PRAGMA ${source}`;

const stmt = this.prepare(pragma);
const results = stmt.all();
const stmt = await this.prepare(pragma);
const results = await stmt.all();

return results;
}
@@ -197,13 +190,13 @@ class Database {
*
* @param {string} sql - The SQL statement string to execute.
*/
exec(sql) {
async exec(sql) {
if (!this.open) {
throw new TypeError("The database connection is not open");
}

try {
this.db.batch(sql);
await this.db.batchAsync(sql);
} catch (err) {
throw convertError(err);
}
@@ -228,7 +221,7 @@ class Database {
/**
* Closes the database connection.
*/
close() {
async close() {
this.db.close();
}
}
@@ -305,7 +298,7 @@ class Statement {
bindParams(this.stmt, bindParameters);

while (true) {
const stepResult = this.stmt.step();
const stepResult = await this.stmt.stepAsync();
if (stepResult === STEP_IO) {
await this.db.db.ioLoopAsync();
continue;
@@ -335,7 +328,7 @@ class Statement {
bindParams(this.stmt, bindParameters);

while (true) {
const stepResult = this.stmt.step();
const stepResult = await this.stmt.stepAsync();
if (stepResult === STEP_IO) {
await this.db.db.ioLoopAsync();
continue;
@@ -359,7 +352,7 @@ class Statement {
bindParams(this.stmt, bindParameters);

while (true) {
const stepResult = this.stmt.step();
const stepResult = await this.stmt.stepAsync();
if (stepResult === STEP_IO) {
await this.db.db.ioLoopAsync();
continue;
@@ -384,7 +377,7 @@ class Statement {
const rows: any[] = [];

while (true) {
const stepResult = this.stmt.step();
const stepResult = await this.stmt.stepAsync();
if (stepResult === STEP_IO) {
await this.db.db.ioLoopAsync();
continue;
@@ -421,17 +414,9 @@ class Statement {
throw convertError(err);
}
}
}

/**
* Creates a new database connection asynchronously.
*
* @param {string} path - Path to the database file.
* @param {Object} opts - Options for database behavior.
* @returns {Promise<Database>} - A promise that resolves to a Database instance.
*/
async function connect(path: string, opts: any = {}): Promise<Database> {
return new Database(path, opts);
close() {
this.stmt.finalize();
}
}

export { Database, SqliteError, connect }
export { Database, Statement }
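Note that `pragma` is now async like the rest of the promise API: it prepares `PRAGMA <source>` and returns all result rows. A minimal sketch (the `connect` helper comes from the Node package shown later in this commit; the returned value is illustrative):

```typescript
import { connect } from '@tursodatabase/database';

const db = await connect(':memory:');
const rows = await db.pragma('journal_mode'); // e.g. [{ journal_mode: 'wal' }]
```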
@@ -1,17 +1,14 @@
{
"compilerOptions": {
"skipLibCheck": true,
"declaration": true,
"declarationMap": true,
"module": "esnext",
"target": "esnext",
"outDir": "dist/",
"lib": [
"es2020"
],
"paths": {
"#entry-point": [
"./index.js"
]
}
},
"include": [
"*"
46
bindings/javascript/packages/common/types.ts
Normal file
@@ -0,0 +1,46 @@
export interface DatabaseOpts {
  readonly?: boolean,
  fileMustExist?: boolean,
  timeout?: number
  name?: string
  tracing?: 'info' | 'debug' | 'trace'
}

export interface NativeDatabase {
  memory: boolean,
  path: string,
  new(path: string): NativeDatabase;
  batchSync(sql: string);
  batchAsync(sql: string): Promise<void>;

  ioLoopSync();
  ioLoopAsync(): Promise<void>;

  prepare(sql: string): NativeStatement;

  pluck(pluckMode: boolean);
  defaultSafeIntegers(toggle: boolean);
  totalChanges(): number;
  changes(): number;
  lastInsertRowid(): number;
  close();
}


// Step result constants
export const STEP_ROW = 1;
export const STEP_DONE = 2;
export const STEP_IO = 3;

export interface NativeStatement {
  stepAsync(): Promise<number>;
  stepSync(): number;

  pluck(pluckMode: boolean);
  safeIntegers(toggle: boolean);
  raw(toggle: boolean);
  columns(): string[];
  row(): any;
  reset();
  finalize();
}
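The `STEP_*` constants and the paired sync/async methods define the stepping protocol both wrappers implement: step until a row or completion, and run the I/O loop whenever the engine reports `STEP_IO`. A minimal sketch of the async variant, using only the interfaces above:

```typescript
import { NativeDatabase, NativeStatement, STEP_ROW, STEP_DONE, STEP_IO } from "./types.js";

// Drive a prepared statement to completion, collecting all rows.
async function drive(db: NativeDatabase, stmt: NativeStatement): Promise<any[]> {
  const rows: any[] = [];
  while (true) {
    const stepResult = await stmt.stepAsync();
    if (stepResult === STEP_IO) {
      await db.ioLoopAsync(); // let pending I/O finish, then step again
      continue;
    }
    if (stepResult === STEP_ROW) {
      rows.push(stmt.row());
      continue;
    }
    if (stepResult === STEP_DONE) {
      return rows;
    }
    throw new Error(`unexpected step result: ${stepResult}`);
  }
}
```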
125
bindings/javascript/packages/native/README.md
Normal file
@@ -0,0 +1,125 @@
<p align="center">
<h1 align="center">Turso Database for JavaScript in Node</h1>
</p>

<p align="center">
<a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
<a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
<a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package is the Turso embedded database library for JavaScript in Node.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Features

- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
- **In-process**: No network overhead, runs directly in your Node.js process
- **TypeScript support**: Full TypeScript definitions included
- **Cross-platform**: Supports Linux (x64 and arm64), macOS, and Windows (the browser is supported via the separate `@tursodatabase/database-browser` package)

## Installation

```bash
npm install @tursodatabase/database
```

## Getting Started

### In-Memory Database

```javascript
import { connect } from '@tursodatabase/database';

// Create an in-memory database
const db = await connect(':memory:');

// Create a table
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

// Insert data
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
await insert.run('Alice', 'alice@example.com');
await insert.run('Bob', 'bob@example.com');

// Query data
const users = await db.prepare('SELECT * FROM users').all();
console.log(users);
// Output: [
//   { id: 1, name: 'Alice', email: 'alice@example.com' },
//   { id: 2, name: 'Bob', email: 'bob@example.com' }
// ]
```

### File-Based Database

```javascript
import { connect } from '@tursodatabase/database';

// Create or open a database file
const db = await connect('my-database.db');

// Create a table
await db.exec(`
  CREATE TABLE IF NOT EXISTS posts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    content TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )
`);

// Insert a post
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
const result = await insertPost.run('Hello World', 'This is my first blog post!');

console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
```

### Transactions

```javascript
import { connect } from '@tursodatabase/database';

const db = await connect('transactions.db');

// Using transactions for atomic operations
const transaction = db.transaction(async (users) => {
  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  for (const user of users) {
    await insert.run(user.name, user.email);
  }
});

// Execute transaction
await transaction([
  { name: 'Alice', email: 'alice@example.com' },
  { name: 'Bob', email: 'bob@example.com' }
]);
```

## API Reference

For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).

## Related Packages

* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud.

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)
67
bindings/javascript/packages/native/compat.test.ts
Normal file
@@ -0,0 +1,67 @@
import { unlinkSync } from "node:fs";
import { expect, test } from 'vitest'
import { Database } from './compat.js'

test('in-memory db', () => {
  const db = new Database(":memory:");
  db.exec("CREATE TABLE t(x)");
  db.exec("INSERT INTO t VALUES (1), (2), (3)");
  const stmt = db.prepare("SELECT * FROM t WHERE x % 2 = ?");
  const rows = stmt.all([1]);
  expect(rows).toEqual([{ x: 1 }, { x: 3 }]);
})

test('on-disk db', () => {
  const path = `test-${(Math.random() * 10000) | 0}.db`;
  try {
    const db1 = new Database(path);
    db1.exec("CREATE TABLE t(x)");
    db1.exec("INSERT INTO t VALUES (1), (2), (3)");
    const stmt1 = db1.prepare("SELECT * FROM t WHERE x % 2 = ?");
    expect(stmt1.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
    const rows1 = stmt1.all([1]);
    expect(rows1).toEqual([{ x: 1 }, { x: 3 }]);
    db1.close();

    const db2 = new Database(path);
    const stmt2 = db2.prepare("SELECT * FROM t WHERE x % 2 = ?");
    expect(stmt2.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
    const rows2 = stmt2.all([1]);
    expect(rows2).toEqual([{ x: 1 }, { x: 3 }]);
    db2.close();
  } finally {
    unlinkSync(path);
    unlinkSync(`${path}-wal`);
  }
})

test('attach', () => {
  const path1 = `test-${(Math.random() * 10000) | 0}.db`;
  const path2 = `test-${(Math.random() * 10000) | 0}.db`;
  try {
    const db1 = new Database(path1);
    db1.exec("CREATE TABLE t(x)");
    db1.exec("INSERT INTO t VALUES (1), (2), (3)");
    const db2 = new Database(path2);
    db2.exec("CREATE TABLE q(x)");
    db2.exec("INSERT INTO q VALUES (4), (5), (6)");

    db1.exec(`ATTACH '${path2}' as secondary`);

    const stmt = db1.prepare("SELECT * FROM t UNION ALL SELECT * FROM secondary.q");
    expect(stmt.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
    const rows = stmt.all([1]);
    expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }, { x: 4 }, { x: 5 }, { x: 6 }]);
  } finally {
    unlinkSync(path1);
    unlinkSync(`${path1}-wal`);
    unlinkSync(path2);
    unlinkSync(`${path2}-wal`);
  }
})

test('blobs', () => {
  const db = new Database(":memory:");
  const rows = db.prepare("SELECT x'1020' as x").all();
  expect(rows).toEqual([{ x: Buffer.from([16, 32]) }])
})
10
bindings/javascript/packages/native/compat.ts
Normal file
@@ -0,0 +1,10 @@
import { DatabaseCompat, NativeDatabase, SqliteError, DatabaseOpts } from "@tursodatabase/database-common"
import { Database as NativeDB } from "#index";

class Database extends DatabaseCompat {
  constructor(path: string, opts: DatabaseOpts = {}) {
    super(new NativeDB(path, { tracing: opts.tracing }) as unknown as NativeDatabase, opts)
  }
}

export { Database, SqliteError }
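This wrapper backs the `compat` entry point: a synchronous, better-sqlite3-style API over the native class. Usage mirrors the tests above:

```typescript
import { Database } from '@tursodatabase/database/compat';

const db = new Database(':memory:');
db.exec('CREATE TABLE t(x)');
db.exec('INSERT INTO t VALUES (1), (2), (3)');
const rows = db.prepare('SELECT * FROM t WHERE x % 2 = ?').all([1]);
// rows: [{ x: 1 }, { x: 3 }]
```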
@@ -8,13 +8,13 @@ export declare class Database {
* # Arguments
* * `path` - The path to the database file.
*/
constructor(path: string)
constructor(path: string, opts?: DatabaseOpts | undefined | null)
/** Returns whether the database is in memory-only mode. */
get memory(): boolean
/** Returns whether the database connection is open. */
get open(): boolean
/**
* Executes a batch of SQL statements.
* Executes a batch of SQL statements on the main thread.
*
* # Arguments
*
@@ -22,7 +22,17 @@ export declare class Database {
*
* # Returns
*/
batch(sql: string): void
batchSync(sql: string): void
/**
* Executes a batch of SQL statements outside of the main thread.
*
* # Arguments
*
* * `sql` - The SQL statements to execute.
*
* # Returns
*/
batchAsync(sql: string): Promise<unknown>
/**
* Prepares a statement for execution.
*
@@ -105,10 +115,15 @@ export declare class Statement {
*/
bindAt(index: number, value: unknown): void
/**
* Step the statement and return result code:
* Step the statement and return result code (executed on the main thread):
* 1 = Row available, 2 = Done, 3 = I/O needed
*/
step(): number
stepSync(): number
/**
* Step the statement and return result code (executed on the background thread):
* 1 = Row available, 2 = Done, 3 = I/O needed
*/
stepAsync(): Promise<unknown>
/** Get the current row data according to the presentation mode */
row(): unknown
/** Sets the presentation mode to raw. */
@@ -128,3 +143,7 @@ export declare class Statement {
/** Finalizes the statement. */
finalize(): void
}

export interface DatabaseOpts {
tracing?: string
}
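The sync/async split above is what the two wrappers build on: `batchSync`/`stepSync` execute on the calling thread (used by the compat API), while `batchAsync`/`stepAsync` run off the main thread (used by the promise API). A small sketch using the package-internal `#index` specifier, as `compat.ts` and `promise.ts` do:

```typescript
import { Database as NativeDB } from '#index';

const db = new NativeDB(':memory:');
db.batchSync('CREATE TABLE t(x); INSERT INTO t VALUES (1);'); // blocks the caller
await db.batchAsync('INSERT INTO t VALUES (2);');             // executes on a background thread
```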
513
bindings/javascript/packages/native/index.js
Normal file
@@ -0,0 +1,513 @@
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */

import { createRequire } from 'node:module'
const require = createRequire(import.meta.url)
const __dirname = new URL('.', import.meta.url).pathname

const { readFileSync } = require('node:fs')
let nativeBinding = null
const loadErrors = []

const isMusl = () => {
  let musl = false
  if (process.platform === 'linux') {
    musl = isMuslFromFilesystem()
    if (musl === null) {
      musl = isMuslFromReport()
    }
    if (musl === null) {
      musl = isMuslFromChildProcess()
    }
  }
  return musl
}

const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')

const isMuslFromFilesystem = () => {
  try {
    return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
  } catch {
    return null
  }
}

const isMuslFromReport = () => {
  let report = null
  if (typeof process.report?.getReport === 'function') {
    process.report.excludeNetwork = true
    report = process.report.getReport()
  }
  if (!report) {
    return null
  }
  if (report.header && report.header.glibcVersionRuntime) {
    return false
  }
  if (Array.isArray(report.sharedObjects)) {
    if (report.sharedObjects.some(isFileMusl)) {
      return true
    }
  }
  return false
}

const isMuslFromChildProcess = () => {
  try {
    return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
  } catch (e) {
    // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
    return false
  }
}

function requireNative() {
  if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
    try {
      nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
    } catch (err) {
      loadErrors.push(err)
    }
  } else if (process.platform === 'android') {
    if (process.arch === 'arm64') {
      try {
        return require('./turso.android-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-android-arm64')
        const bindingPackageVersion = require('@tursodatabase/database-android-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./turso.android-arm-eabi.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-android-arm-eabi')
        const bindingPackageVersion = require('@tursodatabase/database-android-arm-eabi/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
    }
  } else if (process.platform === 'win32') {
    if (process.arch === 'x64') {
      try {
        return require('./turso.win32-x64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-win32-x64-msvc')
        const bindingPackageVersion = require('@tursodatabase/database-win32-x64-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'ia32') {
      try {
        return require('./turso.win32-ia32-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-win32-ia32-msvc')
        const bindingPackageVersion = require('@tursodatabase/database-win32-ia32-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./turso.win32-arm64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-win32-arm64-msvc')
        const bindingPackageVersion = require('@tursodatabase/database-win32-arm64-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
    }
  } else if (process.platform === 'darwin') {
    try {
      return require('./turso.darwin-universal.node')
    } catch (e) {
      loadErrors.push(e)
    }
    try {
      const binding = require('@tursodatabase/database-darwin-universal')
      const bindingPackageVersion = require('@tursodatabase/database-darwin-universal/package.json').version
      if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
        throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
      loadErrors.push(e)
    }
    if (process.arch === 'x64') {
      try {
        return require('./turso.darwin-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-darwin-x64')
        const bindingPackageVersion = require('@tursodatabase/database-darwin-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./turso.darwin-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-darwin-arm64')
        const bindingPackageVersion = require('@tursodatabase/database-darwin-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
    }
  } else if (process.platform === 'freebsd') {
    if (process.arch === 'x64') {
      try {
        return require('./turso.freebsd-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-freebsd-x64')
        const bindingPackageVersion = require('@tursodatabase/database-freebsd-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./turso.freebsd-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-freebsd-arm64')
        const bindingPackageVersion = require('@tursodatabase/database-freebsd-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
    }
  } else if (process.platform === 'linux') {
    if (process.arch === 'x64') {
      if (isMusl()) {
        try {
          return require('./turso.linux-x64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-x64-musl')
          const bindingPackageVersion = require('@tursodatabase/database-linux-x64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-x64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-x64-gnu')
          const bindingPackageVersion = require('@tursodatabase/database-linux-x64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm64') {
      if (isMusl()) {
        try {
          return require('./turso.linux-arm64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-arm64-musl')
          const bindingPackageVersion = require('@tursodatabase/database-linux-arm64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-arm64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-arm64-gnu')
          const bindingPackageVersion = require('@tursodatabase/database-linux-arm64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm') {
      if (isMusl()) {
        try {
          return require('./turso.linux-arm-musleabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-arm-musleabihf')
          const bindingPackageVersion = require('@tursodatabase/database-linux-arm-musleabihf/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-arm-gnueabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-arm-gnueabihf')
          const bindingPackageVersion = require('@tursodatabase/database-linux-arm-gnueabihf/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'riscv64') {
      if (isMusl()) {
        try {
          return require('./turso.linux-riscv64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-riscv64-musl')
          const bindingPackageVersion = require('@tursodatabase/database-linux-riscv64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso.linux-riscv64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/database-linux-riscv64-gnu')
          const bindingPackageVersion = require('@tursodatabase/database-linux-riscv64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'ppc64') {
      try {
        return require('./turso.linux-ppc64-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-linux-ppc64-gnu')
        const bindingPackageVersion = require('@tursodatabase/database-linux-ppc64-gnu/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 's390x') {
      try {
        return require('./turso.linux-s390x-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-linux-s390x-gnu')
        const bindingPackageVersion = require('@tursodatabase/database-linux-s390x-gnu/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
    }
  } else if (process.platform === 'openharmony') {
    if (process.arch === 'arm64') {
      try {
        return require('./turso.openharmony-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-openharmony-arm64')
        const bindingPackageVersion = require('@tursodatabase/database-openharmony-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'x64') {
      try {
        return require('./turso.openharmony-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-openharmony-x64')
        const bindingPackageVersion = require('@tursodatabase/database-openharmony-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./turso.openharmony-arm.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/database-openharmony-arm')
        const bindingPackageVersion = require('@tursodatabase/database-openharmony-arm/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`))
    }
  } else {
    loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
  }
}

nativeBinding = requireNative()

if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
  try {
    nativeBinding = require('./turso.wasi.cjs')
  } catch (err) {
    if (process.env.NAPI_RS_FORCE_WASI) {
      loadErrors.push(err)
    }
  }
  if (!nativeBinding) {
    try {
      nativeBinding = require('@tursodatabase/database-wasm32-wasi')
    } catch (err) {
      if (process.env.NAPI_RS_FORCE_WASI) {
        loadErrors.push(err)
      }
    }
  }
}

if (!nativeBinding) {
  if (loadErrors.length > 0) {
    throw new Error(
      `Cannot find native binding. ` +
      `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
      'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
      { cause: loadErrors }
    )
  }
  throw new Error(`Failed to load native binding`)
}

const { Database, Statement } = nativeBinding
export { Database }
export { Statement }
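The generated loader resolves the binding in order: an explicit override, a bundled `.node` file next to `index.js`, the matching `@tursodatabase/database-*` platform package, and finally the WASI build. Two environment variables steer it; the path below is illustrative:

```typescript
// Force a specific native library instead of platform detection:
//   NAPI_RS_NATIVE_LIBRARY_PATH=/path/to/turso.linux-x64-gnu.node node app.js
// Force the wasm32-wasi fallback even when a native binding exists:
//   NAPI_RS_FORCE_WASI=1 node app.js
import { Database } from '@tursodatabase/database';
```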
52
bindings/javascript/packages/native/package.json
Normal file
@@ -0,0 +1,52 @@
{
  "name": "@tursodatabase/database",
  "version": "0.1.5-pre.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "license": "MIT",
  "module": "./dist/promise.js",
  "main": "./dist/promise.js",
  "type": "module",
  "exports": {
    ".": "./dist/promise.js",
    "./compat": "./dist/compat.js"
  },
  "files": [
    "index.js",
    "dist/**",
    "README.md"
  ],
  "packageManager": "yarn@4.9.2",
  "devDependencies": {
    "@napi-rs/cli": "^3.1.5",
    "@types/node": "^24.3.1",
    "typescript": "^5.9.2",
    "vitest": "^3.2.4"
  },
  "scripts": {
    "napi-build": "napi build --platform --release --esm --manifest-path ../../Cargo.toml --output-dir .",
    "napi-dirs": "napi create-npm-dirs",
    "napi-artifacts": "napi artifacts --output-dir .",
    "tsc-build": "npm exec tsc",
    "build": "npm run napi-build && npm run tsc-build",
    "test": "vitest --run",
    "prepublishOnly": "npm run napi-dirs && npm run napi-artifacts && napi prepublish -t npm"
  },
  "napi": {
    "binaryName": "turso",
    "targets": [
      "x86_64-unknown-linux-gnu",
      "x86_64-pc-windows-msvc",
      "universal-apple-darwin",
      "aarch64-unknown-linux-gnu"
    ]
  },
  "dependencies": {
    "@tursodatabase/database-common": "^0.1.5-pre.5"
  },
  "imports": {
    "#index": "./index.js"
  }
}
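The `exports` map wires up the two public entry points, while the `imports` field maps the package-internal `#index` specifier to the generated loader:

```typescript
import { connect } from '@tursodatabase/database';           // promise API (dist/promise.js)
import { Database } from '@tursodatabase/database/compat';   // sync compat API (dist/compat.js)
```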
107
bindings/javascript/packages/native/promise.test.ts
Normal file
@@ -0,0 +1,107 @@
import { unlinkSync } from "node:fs";
import { expect, test } from 'vitest'
import { connect } from './promise.js'

test('in-memory db', async () => {
  const db = await connect(":memory:");
  await db.exec("CREATE TABLE t(x)");
  await db.exec("INSERT INTO t VALUES (1), (2), (3)");
  const stmt = db.prepare("SELECT * FROM t WHERE x % 2 = ?");
  const rows = await stmt.all([1]);
  expect(rows).toEqual([{ x: 1 }, { x: 3 }]);
})

test('on-disk db', async () => {
  const path = `test-${(Math.random() * 10000) | 0}.db`;
  try {
    const db1 = await connect(path);
    await db1.exec("CREATE TABLE t(x)");
    await db1.exec("INSERT INTO t VALUES (1), (2), (3)");
    const stmt1 = db1.prepare("SELECT * FROM t WHERE x % 2 = ?");
    expect(stmt1.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
    const rows1 = await stmt1.all([1]);
    expect(rows1).toEqual([{ x: 1 }, { x: 3 }]);
    db1.close();

    const db2 = await connect(path);
    const stmt2 = db2.prepare("SELECT * FROM t WHERE x % 2 = ?");
    expect(stmt2.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
    const rows2 = await stmt2.all([1]);
    expect(rows2).toEqual([{ x: 1 }, { x: 3 }]);
    db2.close();
  } finally {
    unlinkSync(path);
    unlinkSync(`${path}-wal`);
  }
})

test('attach', async () => {
  const path1 = `test-${(Math.random() * 10000) | 0}.db`;
  const path2 = `test-${(Math.random() * 10000) | 0}.db`;
  try {
    const db1 = await connect(path1);
    await db1.exec("CREATE TABLE t(x)");
    await db1.exec("INSERT INTO t VALUES (1), (2), (3)");
    const db2 = await connect(path2);
    await db2.exec("CREATE TABLE q(x)");
    await db2.exec("INSERT INTO q VALUES (4), (5), (6)");

    await db1.exec(`ATTACH '${path2}' as secondary`);

    const stmt = db1.prepare("SELECT * FROM t UNION ALL SELECT * FROM secondary.q");
    expect(stmt.columns()).toEqual([{ name: "x", column: null, database: null, table: null, type: null }]);
    const rows = await stmt.all([1]);
    expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }, { x: 4 }, { x: 5 }, { x: 6 }]);
  } finally {
    unlinkSync(path1);
    unlinkSync(`${path1}-wal`);
    unlinkSync(path2);
    unlinkSync(`${path2}-wal`);
  }
})

test('blobs', async () => {
  const db = await connect(":memory:");
  const rows = await db.prepare("SELECT x'1020' as x").all();
  expect(rows).toEqual([{ x: Buffer.from([16, 32]) }])
})


test('example-1', async () => {
  const db = await connect(':memory:');
  await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  await insert.run('Alice', 'alice@example.com');
  await insert.run('Bob', 'bob@example.com');

  const users = await db.prepare('SELECT * FROM users').all();
  expect(users).toEqual([
    { id: 1, name: 'Alice', email: 'alice@example.com' },
    { id: 2, name: 'Bob', email: 'bob@example.com' }
  ]);
})

test('example-2', async () => {
  const db = await connect(':memory:');
  await db.exec('CREATE TABLE users (name, email)');
  // Using transactions for atomic operations
  const transaction = db.transaction(async (users) => {
    const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
    for (const user of users) {
      await insert.run(user.name, user.email);
    }
  });

  // Execute transaction
  await transaction([
    { name: 'Alice', email: 'alice@example.com' },
    { name: 'Bob', email: 'bob@example.com' }
  ]);

  const rows = await db.prepare('SELECT * FROM users').all();
  expect(rows).toEqual([
    { name: 'Alice', email: 'alice@example.com' },
    { name: 'Bob', email: 'bob@example.com' }
  ]);
})
21
bindings/javascript/packages/native/promise.ts
Normal file
@@ -0,0 +1,21 @@
import { DatabasePromise, NativeDatabase, SqliteError, DatabaseOpts } from "@tursodatabase/database-common"
import { Database as NativeDB } from "#index";

class Database extends DatabasePromise {
  constructor(path: string, opts: DatabaseOpts = {}) {
    super(new NativeDB(path, { tracing: opts.tracing }) as unknown as NativeDatabase, opts)
  }
}

/**
 * Creates a new database connection asynchronously.
 *
 * @param {string} path - Path to the database file.
 * @param {Object} opts - Options for database behavior.
 * @returns {Promise<Database>} - A promise that resolves to a Database instance.
 */
async function connect(path: string, opts: any = {}): Promise<Database> {
  return new Database(path, opts);
}

export { connect, Database, SqliteError }
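The wrapper forwards only the `tracing` option to the native constructor, so enabling engine logs is a one-liner (levels per `DatabaseOpts` in the common package):

```typescript
import { connect } from '@tursodatabase/database';

const db = await connect('app.db', { tracing: 'debug' }); // 'info' | 'debug' | 'trace'
```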
21
bindings/javascript/packages/native/tsconfig.json
Normal file
@@ -0,0 +1,21 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "nodenext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020"
    ],
    "paths": {
      "#index": [
        "./index.js"
      ]
    }
  },
  "include": [
    "*"
  ]
}
28
bindings/javascript/perf/package-lock.json
generated
@@ -6,28 +6,34 @@
"": {
"name": "turso-perf",
"dependencies": {
"@tursodatabase/database": "..",
"@tursodatabase/database": "../packages/native",
"better-sqlite3": "^9.5.0",
"mitata": "^0.1.11"
}
},
"..": {
"workspaces": [
"packages/core",
"packages/native",
"packages/browser"
]
},
"../packages/native": {
"name": "@tursodatabase/database",
"version": "0.1.4-pre.4",
"version": "0.1.5-pre.3",
"license": "MIT",
"devDependencies": {
"@napi-rs/cli": "^3.0.4",
"@napi-rs/wasm-runtime": "^1.0.1",
"ava": "^6.0.1",
"better-sqlite3": "^11.9.1",
"typescript": "^5.9.2"
"dependencies": {
"@tursodatabase/database-common": "^0.1.5-pre.3"
},
"engines": {
"node": ">= 10"
"devDependencies": {
"@napi-rs/cli": "^3.1.5",
"@types/node": "^24.3.1",
"typescript": "^5.9.2",
"vitest": "^3.2.4"
}
},
"node_modules/@tursodatabase/database": {
"resolved": "..",
"resolved": "../packages/native",
"link": true
},
"node_modules/base64-js": {

@@ -2,9 +2,10 @@
"name": "turso-perf",
"type": "module",
"private": true,
"type": "module",
"dependencies": {
"better-sqlite3": "^9.5.0",
"@tursodatabase/database": "..",
"@tursodatabase/database": "../packages/native",
"mitata": "^0.1.11"
}
}

@@ -1,6 +1,6 @@
import { run, bench, group, baseline } from 'mitata';

import Database from '@tursodatabase/database';
import { Database } from '@tursodatabase/database/compat';

const db = new Database(':memory:');

254
bindings/javascript/src/browser.rs
Normal file
254
bindings/javascript/src/browser.rs
Normal file
@@ -0,0 +1,254 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use napi::bindgen_prelude::*;
|
||||
use napi_derive::napi;
|
||||
use turso_core::{storage::database::DatabaseFile, Clock, File, Instant, IO};
|
||||
|
||||
use crate::{init_tracing, is_memory, Database, DatabaseOpts};
|
||||
|
||||
pub struct NoopTask;
|
||||
|
||||
impl Task for NoopTask {
|
||||
type Output = ();
|
||||
type JsValue = ();
|
||||
fn compute(&mut self) -> Result<Self::Output> {
|
||||
Ok(())
|
||||
}
|
||||
fn resolve(&mut self, _: Env, _: Self::Output) -> Result<Self::JsValue> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
/// turso-db in the the browser requires explicit thread pool initialization
|
||||
/// so, we just put no-op task on the thread pool and force emnapi to allocate web worker
|
||||
pub fn init_thread_pool() -> napi::Result<AsyncTask<NoopTask>> {
|
||||
Ok(AsyncTask::new(NoopTask))
|
||||
}
|
||||
|
||||
pub struct ConnectTask {
|
||||
path: String,
|
||||
is_memory: bool,
|
||||
io: Arc<dyn turso_core::IO>,
|
||||
}
|
||||
|
||||
pub struct ConnectResult {
|
||||
db: Arc<turso_core::Database>,
|
||||
conn: Arc<turso_core::Connection>,
|
||||
}
|
||||
|
||||
unsafe impl Send for ConnectResult {}
|
||||
|
||||
impl Task for ConnectTask {
|
||||
type Output = ConnectResult;
|
||||
type JsValue = Database;
|
||||
|
||||
fn compute(&mut self) -> Result<Self::Output> {
|
||||
let file = self
|
||||
.io
|
||||
.open_file(&self.path, turso_core::OpenFlags::Create, false)
|
||||
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to open file: {e}")))?;
|
||||
|
||||
let db_file = Arc::new(DatabaseFile::new(file));
|
||||
let db = turso_core::Database::open(self.io.clone(), &self.path, db_file, false, true)
|
||||
.map_err(|e| {
|
||||
Error::new(
|
||||
Status::GenericFailure,
|
||||
format!("Failed to open database: {e}"),
|
||||
)
|
||||
})?;
|
||||
|
||||
let conn = db
|
||||
.connect()
|
||||
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to connect: {e}")))?;
|
||||
|
||||
Ok(ConnectResult { db, conn })
|
||||
}
|
||||
|
||||
fn resolve(&mut self, _: Env, result: Self::Output) -> Result<Self::JsValue> {
|
||||
Ok(Database::create(
|
||||
Some(result.db),
|
||||
self.io.clone(),
|
||||
result.conn,
|
||||
self.is_memory,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
// we offload connect to the web-worker because:
|
||||
// 1. browser main-thread do not support Atomic.wait operations
|
||||
// 2. turso-db use blocking IO [io.wait_for_completion(c)] in few places during initialization path
|
||||
//
|
||||
// so, we offload connect to the worker thread
|
||||
pub fn connect(path: String, opts: Option<DatabaseOpts>) -> Result<AsyncTask<ConnectTask>> {
|
||||
if let Some(opts) = opts {
|
||||
init_tracing(opts.tracing);
|
||||
}
|
||||
let task = if is_memory(&path) {
|
||||
ConnectTask {
|
||||
io: Arc::new(turso_core::MemoryIO::new()),
|
||||
is_memory: true,
|
||||
path,
|
||||
}
|
||||
} else {
|
||||
let io = Arc::new(Opfs::new()?);
|
||||
ConnectTask {
|
||||
io,
|
||||
is_memory: false,
|
||||
path,
|
||||
}
|
||||
};
|
||||
Ok(AsyncTask::new(task))
|
||||
}
|
||||
#[napi]
|
||||
#[derive(Clone)]
|
||||
pub struct Opfs;
|
||||
|
||||
#[napi]
|
||||
#[derive(Clone)]
|
||||
struct OpfsFile {
|
||||
handle: i32,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl Opfs {
|
||||
#[napi(constructor)]
|
||||
pub fn new() -> napi::Result<Self> {
|
||||
Ok(Self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clock for Opfs {
|
||||
fn now(&self) -> Instant {
|
||||
Instant { secs: 0, micros: 0 } // TODO
|
||||
}
|
||||
}
|
||||
|
||||
#[link(wasm_import_module = "env")]
|
||||
extern "C" {
|
||||
fn lookup_file(path: *const u8, path_len: usize) -> i32;
|
||||
fn read(handle: i32, buffer: *mut u8, buffer_len: usize, offset: i32) -> i32;
|
||||
fn write(handle: i32, buffer: *const u8, buffer_len: usize, offset: i32) -> i32;
|
||||
fn sync(handle: i32) -> i32;
|
||||
fn truncate(handle: i32, length: usize) -> i32;
|
||||
fn size(handle: i32) -> i32;
|
||||
fn is_web_worker() -> bool;
|
||||
}
|
||||
|
||||
fn is_web_worker_safe() -> bool {
|
||||
unsafe { is_web_worker() }
|
||||
}
|
||||
|
||||
impl IO for Opfs {
    fn open_file(
        &self,
        path: &str,
        _: turso_core::OpenFlags,
        _: bool,
    ) -> turso_core::Result<std::sync::Arc<dyn turso_core::File>> {
        tracing::info!("open_file: {}", path);
        let result = unsafe { lookup_file(path.as_ptr(), path.len()) };
        if result >= 0 {
            Ok(Arc::new(OpfsFile { handle: result }))
        } else if result == -404 {
            Err(turso_core::LimboError::InternalError(
                "files must be created in advance for OPFS IO".to_string(),
            ))
        } else {
            Err(turso_core::LimboError::InternalError(format!(
                "unexpected file lookup error: {result}"
            )))
        }
    }

    fn remove_file(&self, _: &str) -> turso_core::Result<()> {
        Ok(())
    }
}

impl File for OpfsFile {
    fn lock_file(&self, _: bool) -> turso_core::Result<()> {
        Ok(())
    }

    fn unlock_file(&self) -> turso_core::Result<()> {
        Ok(())
    }

    fn pread(
        &self,
        pos: u64,
        c: turso_core::Completion,
    ) -> turso_core::Result<turso_core::Completion> {
        assert!(
            is_web_worker_safe(),
            "opfs must be used only from web worker for now"
        );
        tracing::debug!("pread({}): pos={}", self.handle, pos);
        let handle = self.handle;
        let read_c = c.as_read();
        let buffer = read_c.buf_arc();
        let buffer = buffer.as_mut_slice();
        let result = unsafe { read(handle, buffer.as_mut_ptr(), buffer.len(), pos as i32) };
        c.complete(result as i32);
        Ok(c)
    }

    fn pwrite(
        &self,
        pos: u64,
        buffer: Arc<turso_core::Buffer>,
        c: turso_core::Completion,
    ) -> turso_core::Result<turso_core::Completion> {
        assert!(
            is_web_worker_safe(),
            "opfs must be used only from web worker for now"
        );
        tracing::debug!("pwrite({}): pos={}", self.handle, pos);
        let handle = self.handle;
        let buffer = buffer.as_slice();
        let result = unsafe { write(handle, buffer.as_ptr(), buffer.len(), pos as i32) };
        c.complete(result as i32);
        Ok(c)
    }

    fn sync(&self, c: turso_core::Completion) -> turso_core::Result<turso_core::Completion> {
        assert!(
            is_web_worker_safe(),
            "opfs must be used only from web worker for now"
        );
        tracing::debug!("sync({})", self.handle);
        let handle = self.handle;
        let result = unsafe { sync(handle) };
        c.complete(result as i32);
        Ok(c)
    }

    fn truncate(
        &self,
        len: u64,
        c: turso_core::Completion,
    ) -> turso_core::Result<turso_core::Completion> {
        assert!(
            is_web_worker_safe(),
            "opfs must be used only from web worker for now"
        );
        tracing::debug!("truncate({}): len={}", self.handle, len);
        let handle = self.handle;
        let result = unsafe { truncate(handle, len as usize) };
        c.complete(result as i32);
        Ok(c)
    }

    fn size(&self) -> turso_core::Result<u64> {
        assert!(
            is_web_worker_safe(),
            "size can be called only from web worker context"
        );
        tracing::debug!("size({})", self.handle);
        let handle = self.handle;
        let result = unsafe { size(handle) };
        Ok(result as u64)
    }
}
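A note on the Completion protocol used above: because the OPFS host imports are synchronous (the worker thread may block), every method completes its Completion inline before returning, which is what lets io.wait_for_completion(c) observe the result without parking. A hedged sketch of the same pattern over a plain in-memory buffer, assuming only the turso_core accessors already visible in this patch (the VecFile type itself is illustrative, not part of the change):

use std::sync::Mutex;

// Hypothetical fully-synchronous file: all data lives in memory, so every
// pread can complete its Completion inline, exactly like the OPFS methods.
struct VecFile {
    data: Mutex<Vec<u8>>,
}

impl VecFile {
    fn pread(
        &self,
        pos: u64,
        c: turso_core::Completion,
    ) -> turso_core::Result<turso_core::Completion> {
        let data = self.data.lock().unwrap();
        let read_c = c.as_read();
        let buf = read_c.buf_arc();
        let buf = buf.as_mut_slice();
        let start = (pos as usize).min(data.len());
        let n = buf.len().min(data.len() - start);
        buf[..n].copy_from_slice(&data[start..start + n]);
        // Completing before returning makes the completion observable
        // immediately, with no event loop round-trip.
        c.complete(n as i32);
        Ok(c)
    }
}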
@@ -10,14 +10,20 @@
//! - Iterating through query results
//! - Managing the I/O event loop

#[cfg(feature = "browser")]
pub mod browser;

use napi::bindgen_prelude::*;
use napi::{Env, Task};
use napi_derive::napi;
use std::sync::OnceLock;
use std::{
    cell::{Cell, RefCell},
    num::NonZeroUsize,
    sync::Arc,
};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::FmtSpan;

/// Step result constants
const STEP_ROW: u32 = 1;
@@ -38,12 +44,107 @@ enum PresentationMode {
pub struct Database {
    _db: Option<Arc<turso_core::Database>>,
    io: Arc<dyn turso_core::IO>,
    conn: Arc<turso_core::Connection>,
    conn: Option<Arc<turso_core::Connection>>,
    is_memory: bool,
    is_open: Cell<bool>,
    default_safe_integers: Cell<bool>,
}

pub(crate) fn is_memory(path: &str) -> bool {
    path == ":memory:"
}

static TRACING_INIT: OnceLock<()> = OnceLock::new();
pub(crate) fn init_tracing(level_filter: Option<String>) {
    let Some(level_filter) = level_filter else {
        return;
    };
    let level_filter = match level_filter.as_ref() {
        "info" => LevelFilter::INFO,
        "debug" => LevelFilter::DEBUG,
        "trace" => LevelFilter::TRACE,
        _ => return,
    };
    TRACING_INIT.get_or_init(|| {
        tracing_subscriber::fmt()
            .with_ansi(false)
            .with_thread_ids(true)
            .with_span_events(FmtSpan::ACTIVE)
            .with_max_level(level_filter)
            .init();
    });
}

pub enum DbTask {
    Batch {
        conn: Arc<turso_core::Connection>,
        sql: String,
    },
    Step {
        stmt: Arc<RefCell<Option<turso_core::Statement>>>,
    },
}

unsafe impl Send for DbTask {}

impl Task for DbTask {
    type Output = u32;
    type JsValue = u32;

    fn compute(&mut self) -> Result<Self::Output> {
        match self {
            DbTask::Batch { conn, sql } => {
                batch_sync(conn, sql)?;
                Ok(0)
            }
            DbTask::Step { stmt } => step_sync(stmt),
        }
    }

    fn resolve(&mut self, _: Env, output: Self::Output) -> Result<Self::JsValue> {
        Ok(output)
    }
}

#[napi(object)]
pub struct DatabaseOpts {
    pub tracing: Option<String>,
}

fn batch_sync(conn: &Arc<turso_core::Connection>, sql: &str) -> napi::Result<()> {
    conn.prepare_execute_batch(sql).map_err(|e| {
        Error::new(
            Status::GenericFailure,
            format!("Failed to execute batch: {e}"),
        )
    })?;
    Ok(())
}

fn step_sync(stmt: &Arc<RefCell<Option<turso_core::Statement>>>) -> napi::Result<u32> {
    let mut stmt_ref = stmt.borrow_mut();
    let stmt = stmt_ref
        .as_mut()
        .ok_or_else(|| Error::new(Status::GenericFailure, "Statement has been finalized"))?;

    match stmt.step() {
        Ok(turso_core::StepResult::Row) => Ok(STEP_ROW),
        Ok(turso_core::StepResult::IO) => Ok(STEP_IO),
        Ok(turso_core::StepResult::Done) => Ok(STEP_DONE),
        Ok(turso_core::StepResult::Interrupt) => Err(Error::new(
            Status::GenericFailure,
            "Statement was interrupted",
        )),
        Ok(turso_core::StepResult::Busy) => {
            Err(Error::new(Status::GenericFailure, "Database is busy"))
        }
        Err(e) => Err(Error::new(
            Status::GenericFailure,
            format!("Step failed: {e}"),
        )),
    }
}

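The codes returned by step_sync are the STEP_* constants declared above (1 = row, 2 = done, 3 = I/O needed); callers are expected to pump I/O and step again on STEP_IO. A hedged sketch of that driver loop, with the Pump trait standing in for whatever I/O handle the caller holds (both names are illustrative, not part of this patch):

// Hypothetical pump abstraction standing in for the caller's IO handle.
trait Pump {
    fn run_once(&self);
}

fn drain(stmt: &Statement, io: &dyn Pump) -> napi::Result<()> {
    loop {
        match stmt.step_sync()? {
            1 => { /* STEP_ROW: read the current row, then keep stepping */ }
            2 => return Ok(()), // STEP_DONE
            3 => io.run_once(), // STEP_IO: let pending I/O make progress
            other => unreachable!("unknown step code: {other}"),
        }
    }
}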
#[napi]
impl Database {
    /// Creates a new database instance.
@@ -51,9 +152,11 @@ impl Database {
    /// # Arguments
    /// * `path` - The path to the database file.
    #[napi(constructor)]
    pub fn new(path: String) -> Result<Self> {
        let is_memory = path == ":memory:";
        let io: Arc<dyn turso_core::IO> = if is_memory {
    pub fn new(path: String, opts: Option<DatabaseOpts>) -> Result<Self> {
        if let Some(opts) = opts {
            init_tracing(opts.tracing);
        }
        let io: Arc<dyn turso_core::IO> = if is_memory(&path) {
            Arc::new(turso_core::MemoryIO::new())
        } else {
            Arc::new(turso_core::PlatformIO::new().map_err(|e| {
@@ -61,6 +164,11 @@ impl Database {
            })?)
        };

        #[cfg(feature = "browser")]
        if !is_memory(&path) {
            return Err(Error::new(Status::GenericFailure, "sync constructor is not supported for FS-backed databases in the WASM. Use async connect(...) method instead".to_string()));
        }

        let file = io
            .open_file(&path, turso_core::OpenFlags::Create, false)
            .map_err(|e| Error::new(Status::GenericFailure, format!("Failed to open file: {e}")))?;
@@ -78,7 +186,7 @@ impl Database {
            .connect()
            .map_err(|e| Error::new(Status::GenericFailure, format!("Failed to connect: {e}")))?;

        Ok(Self::create(Some(db), io, conn, is_memory))
        Ok(Self::create(Some(db), io, conn, is_memory(&path)))
    }

    pub fn create(
@@ -90,13 +198,23 @@ impl Database {
        Database {
            _db: db,
            io,
            conn,
            conn: Some(conn),
            is_memory,
            is_open: Cell::new(true),
            default_safe_integers: Cell::new(false),
        }
    }

    fn conn(&self) -> Result<Arc<turso_core::Connection>> {
        let Some(conn) = self.conn.as_ref() else {
            return Err(napi::Error::new(
                napi::Status::GenericFailure,
                "connection is not set",
            ));
        };
        Ok(conn.clone())
    }

    /// Returns whether the database is in memory-only mode.
    #[napi(getter)]
    pub fn memory(&self) -> bool {
@@ -109,7 +227,7 @@ impl Database {
        self.is_open.get()
    }

    /// Executes a batch of SQL statements.
    /// Executes a batch of SQL statements on the main thread
    ///
    /// # Arguments
    ///
@@ -117,14 +235,23 @@ impl Database {
    ///
    /// # Returns
    #[napi]
    pub fn batch(&self, sql: String) -> Result<()> {
        self.conn.prepare_execute_batch(&sql).map_err(|e| {
            Error::new(
                Status::GenericFailure,
                format!("Failed to execute batch: {e}"),
            )
        })?;
        Ok(())
    pub fn batch_sync(&self, sql: String) -> Result<()> {
        batch_sync(&self.conn()?, &sql)
    }

    /// Executes a batch of SQL statements outside of the main thread
    ///
    /// # Arguments
    ///
    /// * `sql` - The SQL statements to execute.
    ///
    /// # Returns
    #[napi]
    pub fn batch_async(&self, sql: String) -> Result<AsyncTask<DbTask>> {
        Ok(AsyncTask::new(DbTask::Batch {
            conn: self.conn()?.clone(),
            sql,
        }))
    }

    /// Prepares a statement for execution.
@@ -139,14 +266,15 @@ impl Database {
    #[napi]
    pub fn prepare(&self, sql: String) -> Result<Statement> {
        let stmt = self
            .conn
            .conn()?
            .prepare(&sql)
            .map_err(|e| Error::new(Status::GenericFailure, format!("{e}")))?;
        let column_names: Vec<std::ffi::CString> = (0..stmt.num_columns())
            .map(|i| std::ffi::CString::new(stmt.get_column_name(i).to_string()).unwrap())
            .collect();
        Ok(Statement {
            stmt: RefCell::new(Some(stmt)),
            #[allow(clippy::arc_with_non_send_sync)]
            stmt: Arc::new(RefCell::new(Some(stmt))),
            column_names,
            mode: RefCell::new(PresentationMode::Expanded),
            safe_integers: Cell::new(self.default_safe_integers.get()),
@@ -160,7 +288,7 @@ impl Database {
    /// The rowid of the last row inserted.
    #[napi]
    pub fn last_insert_rowid(&self) -> Result<i64> {
        Ok(self.conn.last_insert_rowid())
        Ok(self.conn()?.last_insert_rowid())
    }

    /// Returns the number of changes made by the last statement.
@@ -170,7 +298,7 @@ impl Database {
    /// The number of changes made by the last statement.
    #[napi]
    pub fn changes(&self) -> Result<i64> {
        Ok(self.conn.changes())
        Ok(self.conn()?.changes())
    }

    /// Returns the total number of changes made by all statements.
@@ -180,7 +308,7 @@ impl Database {
    /// The total number of changes made by all statements.
    #[napi]
    pub fn total_changes(&self) -> Result<i64> {
        Ok(self.conn.total_changes())
        Ok(self.conn()?.total_changes())
    }

    /// Closes the database connection.
@@ -189,9 +317,10 @@ impl Database {
    ///
    /// `Ok(())` if the database is closed successfully.
    #[napi]
    pub fn close(&self) -> Result<()> {
    pub fn close(&mut self) -> Result<()> {
        self.is_open.set(false);
        // Database close is handled automatically when dropped
        let _ = self._db.take().unwrap();
        let _ = self.conn.take().unwrap();
        Ok(())
    }

@@ -225,7 +354,7 @@ impl Database {
/// A prepared statement.
#[napi]
pub struct Statement {
    stmt: RefCell<Option<turso_core::Statement>>,
    stmt: Arc<RefCell<Option<turso_core::Statement>>>,
    column_names: Vec<std::ffi::CString>,
    mode: RefCell<PresentationMode>,
    safe_integers: Cell<bool>,
@@ -344,31 +473,20 @@ impl Statement {
        Ok(())
    }

    /// Step the statement and return result code:
    /// Step the statement and return result code (executed on the main thread):
    /// 1 = Row available, 2 = Done, 3 = I/O needed
    #[napi]
    pub fn step(&self) -> Result<u32> {
        let mut stmt_ref = self.stmt.borrow_mut();
        let stmt = stmt_ref
            .as_mut()
            .ok_or_else(|| Error::new(Status::GenericFailure, "Statement has been finalized"))?;
    pub fn step_sync(&self) -> Result<u32> {
        step_sync(&self.stmt)
    }

        match stmt.step() {
            Ok(turso_core::StepResult::Row) => Ok(STEP_ROW),
            Ok(turso_core::StepResult::Done) => Ok(STEP_DONE),
            Ok(turso_core::StepResult::IO) => Ok(STEP_IO),
            Ok(turso_core::StepResult::Interrupt) => Err(Error::new(
                Status::GenericFailure,
                "Statement was interrupted",
            )),
            Ok(turso_core::StepResult::Busy) => {
                Err(Error::new(Status::GenericFailure, "Database is busy"))
            }
            Err(e) => Err(Error::new(
                Status::GenericFailure,
                format!("Step failed: {e}"),
            )),
        }
    /// Step the statement and return result code (executed on the background thread):
    /// 1 = Row available, 2 = Done, 3 = I/O needed
    #[napi]
    pub fn step_async(&self) -> Result<AsyncTask<DbTask>> {
        Ok(AsyncTask::new(DbTask::Step {
            stmt: self.stmt.clone(),
        }))
    }

    /// Get the current row data according to the presentation mode
@@ -543,8 +661,17 @@ fn to_js_value<'a>(
        turso_core::Value::Float(f) => ToNapiValue::into_unknown(*f, env),
        turso_core::Value::Text(s) => ToNapiValue::into_unknown(s.as_str(), env),
        turso_core::Value::Blob(b) => {
            let buffer = Buffer::from(b.as_slice());
            ToNapiValue::into_unknown(buffer, env)
            #[cfg(not(feature = "browser"))]
            {
                let buffer = Buffer::from(b.as_slice());
                ToNapiValue::into_unknown(buffer, env)
            }
            // emnapi does not support Buffer
            #[cfg(feature = "browser")]
            {
                let buffer = Uint8Array::from(b.as_slice());
                ToNapiValue::into_unknown(buffer, env)
            }
        }
    }
}

@@ -1,112 +0,0 @@
/* eslint-disable */
/* prettier-ignore */

/* auto-generated by NAPI-RS */

const __nodeFs = require('node:fs')
const __nodePath = require('node:path')
const { WASI: __nodeWASI } = require('node:wasi')
const { Worker } = require('node:worker_threads')

const {
  createOnMessage: __wasmCreateOnMessageForFsProxy,
  getDefaultContext: __emnapiGetDefaultContext,
  instantiateNapiModuleSync: __emnapiInstantiateNapiModuleSync,
} = require('@napi-rs/wasm-runtime')

const __rootDir = __nodePath.parse(process.cwd()).root

const __wasi = new __nodeWASI({
  version: 'preview1',
  env: process.env,
  preopens: {
    [__rootDir]: __rootDir,
  }
})

const __emnapiContext = __emnapiGetDefaultContext()

const __sharedMemory = new WebAssembly.Memory({
  initial: 4000,
  maximum: 65536,
  shared: true,
})

let __wasmFilePath = __nodePath.join(__dirname, 'turso.wasm32-wasi.wasm')
const __wasmDebugFilePath = __nodePath.join(__dirname, 'turso.wasm32-wasi.debug.wasm')

if (__nodeFs.existsSync(__wasmDebugFilePath)) {
  __wasmFilePath = __wasmDebugFilePath
} else if (!__nodeFs.existsSync(__wasmFilePath)) {
  try {
    __wasmFilePath = __nodePath.resolve('@tursodatabase/database-wasm32-wasi')
  } catch {
    throw new Error('Cannot find turso.wasm32-wasi.wasm file, and @tursodatabase/database-wasm32-wasi package is not installed.')
  }
}

const { instance: __napiInstance, module: __wasiModule, napiModule: __napiModule } = __emnapiInstantiateNapiModuleSync(__nodeFs.readFileSync(__wasmFilePath), {
  context: __emnapiContext,
  asyncWorkPoolSize: (function() {
    const threadsSizeFromEnv = Number(process.env.NAPI_RS_ASYNC_WORK_POOL_SIZE ?? process.env.UV_THREADPOOL_SIZE)
    // NaN > 0 is false
    if (threadsSizeFromEnv > 0) {
      return threadsSizeFromEnv
    } else {
      return 4
    }
  })(),
  reuseWorker: true,
  wasi: __wasi,
  onCreateWorker() {
    const worker = new Worker(__nodePath.join(__dirname, 'wasi-worker.mjs'), {
      env: process.env,
    })
    worker.onmessage = ({ data }) => {
      __wasmCreateOnMessageForFsProxy(__nodeFs)(data)
    }

    // The main thread of Node.js waits for all the active handles before exiting.
    // But Rust threads are never waited on without `thread::join`.
    // So here we hack the code of Node.js to prevent the workers from being referenced (active).
    // According to https://github.com/nodejs/node/blob/19e0d472728c79d418b74bddff588bea70a403d0/lib/internal/worker.js#L415,
    // a worker consists of two handles: kPublicPort and kHandle.
    {
      const kPublicPort = Object.getOwnPropertySymbols(worker).find(s =>
        s.toString().includes("kPublicPort")
      );
      if (kPublicPort) {
        worker[kPublicPort].ref = () => {};
      }

      const kHandle = Object.getOwnPropertySymbols(worker).find(s =>
        s.toString().includes("kHandle")
      );
      if (kHandle) {
        worker[kHandle].ref = () => {};
      }

      worker.unref();
    }
    return worker
  },
  overwriteImports(importObject) {
    importObject.env = {
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
      memory: __sharedMemory,
    }
    return importObject
  },
  beforeInit({ instance }) {
    for (const name of Object.keys(instance.exports)) {
      if (name.startsWith('__napi_register__')) {
        instance.exports[name]()
      }
    }
  },
})
module.exports = __napiModule.exports
module.exports.Database = __napiModule.exports.Database
module.exports.Statement = __napiModule.exports.Statement
@@ -1,32 +0,0 @@
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'

const handler = new MessageHandler({
  onLoad({ wasmModule, wasmMemory }) {
    const wasi = new WASI({
      print: function () {
        // eslint-disable-next-line no-console
        console.log.apply(console, arguments)
      },
      printErr: function() {
        // eslint-disable-next-line no-console
        console.error.apply(console, arguments)
      },
    })
    return instantiateNapiModuleSync(wasmModule, {
      childThread: true,
      wasi,
      overwriteImports(importObject) {
        importObject.env = {
          ...importObject.env,
          ...importObject.napi,
          ...importObject.emnapi,
          memory: wasmMemory,
        }
      },
    })
  },
})

globalThis.onmessage = function (e) {
  handler.handle(e)
}
@@ -1,63 +0,0 @@
import fs from "node:fs";
import { createRequire } from "node:module";
import { parse } from "node:path";
import { WASI } from "node:wasi";
import { parentPort, Worker } from "node:worker_threads";

const require = createRequire(import.meta.url);

const { instantiateNapiModuleSync, MessageHandler, getDefaultContext } = require("@napi-rs/wasm-runtime");

if (parentPort) {
  parentPort.on("message", (data) => {
    globalThis.onmessage({ data });
  });
}

Object.assign(globalThis, {
  self: globalThis,
  require,
  Worker,
  importScripts: function (f) {
    ;(0, eval)(fs.readFileSync(f, "utf8") + "//# sourceURL=" + f);
  },
  postMessage: function (msg) {
    if (parentPort) {
      parentPort.postMessage(msg);
    }
  },
});

const emnapiContext = getDefaultContext();

const __rootDir = parse(process.cwd()).root;

const handler = new MessageHandler({
  onLoad({ wasmModule, wasmMemory }) {
    const wasi = new WASI({
      version: 'preview1',
      env: process.env,
      preopens: {
        [__rootDir]: __rootDir,
      },
    });

    return instantiateNapiModuleSync(wasmModule, {
      childThread: true,
      wasi,
      context: emnapiContext,
      overwriteImports(importObject) {
        importObject.env = {
          ...importObject.env,
          ...importObject.napi,
          ...importObject.emnapi,
          memory: wasmMemory
        };
      },
    });
  },
});

globalThis.onmessage = function (e) {
  handler.handle(e);
};
File diff suppressed because it is too large
412 cli/app.rs
@@ -94,6 +94,60 @@ struct QueryStatistics {
    execute_time_elapsed_samples: Vec<Duration>,
}

macro_rules! row_step_result_query {
    ($app:expr, $sql:expr, $rows:expr, $stats:expr, $row_handle:expr) => {
        if $app.interrupt_count.load(Ordering::Acquire) > 0 {
            println!("Query interrupted.");
            return Ok(());
        }

        let start = Instant::now();
        match $rows.step() {
            Ok(StepResult::Row) => {
                if let Some(ref mut stats) = $stats {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }

                $row_handle
            }
            Ok(StepResult::IO) => {
                let start = Instant::now();
                $rows.run_once()?;
                if let Some(ref mut stats) = $stats {
                    stats.io_time_elapsed_samples.push(start.elapsed());
                }
            }
            Ok(StepResult::Interrupt) => {
                if let Some(ref mut stats) = $stats {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Done) => {
                if let Some(ref mut stats) = $stats {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Busy) => {
                if let Some(ref mut stats) = $stats {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let _ = $app.writeln("database is busy");
                break;
            }
            Err(err) => {
                if let Some(ref mut stats) = $stats {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let report = miette::Error::from(err).with_source_code($sql.to_owned());
                let _ = $app.writeln_fmt(format_args!("{report:?}"));
                break;
            }
        }
    };
}
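Because the macro body contains break and return Ok(()), it can only be invoked inside a loop in a function returning Result; the $row_handle block runs only on StepResult::Row, while I/O pumping, stats sampling, and error reporting are shared. A hedged sketch of the call shape (the locals mirror the output-mode arms further down in this diff):

loop {
    row_step_result_query!(self, sql, rows, statistics, {
        // Runs only on StepResult::Row; the fetched row is available on `rows`.
        let row = rows.row().unwrap();
        let _ = self.writeln_fmt(format_args!("{} column(s)", rows.num_columns()));
    });
}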

impl Limbo {
    pub fn new() -> anyhow::Result<(Self, WorkerGuard)> {
        let opts = Opts::parse();
@@ -192,7 +246,7 @@ impl Limbo {
            self.handle_first_input(&sql)?;
        }
        if !quiet {
            self.write_fmt(format_args!("Turso v{}", env!("CARGO_PKG_VERSION")))?;
            self.writeln_fmt(format_args!("Turso v{}", env!("CARGO_PKG_VERSION")))?;
            self.writeln("Enter \".help\" for usage hints.")?;
            self.writeln(
                "This software is ALPHA, only use for development, testing, and experimentation.",
@@ -363,7 +417,11 @@ impl Limbo {
    }

    fn write_fmt(&mut self, fmt: std::fmt::Arguments) -> io::Result<()> {
        let _ = self.writer.as_mut().unwrap().write_fmt(fmt);
        self.writer.as_mut().unwrap().write_fmt(fmt)
    }

    fn writeln_fmt(&mut self, fmt: std::fmt::Arguments) -> io::Result<()> {
        self.writer.as_mut().unwrap().write_fmt(fmt)?;
        self.writer.as_mut().unwrap().write_all(b"\n")
    }

@@ -376,6 +434,12 @@ impl Limbo {
        self.writer.as_mut().unwrap().write_all(b"\n")
    }

    fn write_null(&mut self) -> io::Result<()> {
        self.writer
            .as_mut()
            .unwrap()
            .write_all(self.opts.null_value.as_bytes())
    }
    fn run_query(&mut self, input: &str) {
        let echo = self.opts.echo;
        if echo {
@@ -383,9 +447,13 @@ impl Limbo {
        }

        let start = Instant::now();
        let mut stats = QueryStatistics {
            io_time_elapsed_samples: vec![],
            execute_time_elapsed_samples: vec![],
        let mut stats = if self.opts.timer {
            Some(QueryStatistics {
                io_time_elapsed_samples: vec![],
                execute_time_elapsed_samples: vec![],
            })
        } else {
            None
        };
        // TODO this is a quickfix. Some ideas for doing case-insensitive comparisons are to use
        // Uncased or Unicase.
@@ -410,14 +478,15 @@ impl Limbo {
            let runner = conn.query_runner(input.as_bytes());
            for output in runner {
                if self
                    .print_query_result(input, output, Some(&mut stats))
                    .print_query_result(input, output, stats.as_mut())
                    .is_err()
                {
                    break;
                }
            }
        }
        self.print_query_performance_stats(start, stats);

        self.print_query_performance_stats(start, stats.as_ref());

        // Display stats if enabled
        if self.opts.stats {
@@ -434,7 +503,7 @@ impl Limbo {
        }
    }

    fn print_query_performance_stats(&mut self, start: Instant, stats: QueryStatistics) {
    fn print_query_performance_stats(&mut self, start: Instant, stats: Option<&QueryStatistics>) {
        let elapsed_as_str = |duration: Duration| {
            if duration.as_secs() >= 1 {
                format!("{} s", duration.as_secs_f64())
@@ -446,7 +515,7 @@ impl Limbo {
                format!("{} ns", duration.as_nanos())
            }
        };
        let sample_stats_as_str = |name: &str, samples: Vec<Duration>| {
        let sample_stats_as_str = |name: &str, samples: &Vec<Duration>| {
            if samples.is_empty() {
                return format!("{name}: No samples available");
            }
@@ -460,18 +529,20 @@ impl Limbo {
            )
        };
        if self.opts.timer {
            let _ = self.writeln("Command stats:\n----------------------------");
            let _ = self.writeln(format!(
                "total: {} (this includes parsing/coloring of cli app)\n",
                elapsed_as_str(start.elapsed())
            ));
        if let Some(stats) = stats {
            let _ = self.writeln("Command stats:\n----------------------------");
            let _ = self.writeln(format!(
                "total: {} (this includes parsing/coloring of cli app)\n",
                elapsed_as_str(start.elapsed())
            ));

            let _ = self.writeln("query execution stats:\n----------------------------");
            let _ = self.writeln(sample_stats_as_str(
                "Execution",
                stats.execute_time_elapsed_samples,
            ));
            let _ = self.writeln(sample_stats_as_str("I/O", stats.io_time_elapsed_samples));
            let _ = self.writeln("query execution stats:\n----------------------------");
            let _ = self.writeln(sample_stats_as_str(
                "Execution",
                &stats.execute_time_elapsed_samples,
            ));
            let _ = self.writeln(sample_stats_as_str("I/O", &stats.io_time_elapsed_samples));
        }
    }
}

@@ -593,12 +664,12 @@ impl Limbo {
    if let Some(opcode) = args.opcode {
        for op in &OPCODE_DESCRIPTIONS {
            if op.name.eq_ignore_ascii_case(opcode.trim()) {
                let _ = self.write_fmt(format_args!("{op}"));
                let _ = self.writeln_fmt(format_args!("{op}"));
            }
        }
    } else {
        for op in &OPCODE_DESCRIPTIONS {
            let _ = self.write_fmt(format_args!("{op}\n"));
            let _ = self.writeln_fmt(format_args!("{op}\n"));
        }
    }
}
@@ -607,13 +678,13 @@ impl Limbo {
}
Command::OutputMode(args) => {
    if let Err(e) = self.set_mode(args.mode) {
        let _ = self.write_fmt(format_args!("Error: {e}"));
        let _ = self.writeln_fmt(format_args!("Error: {e}"));
    }
}
Command::SetOutput(args) => {
    if let Some(path) = args.path {
        if let Err(e) = self.set_output_file(&path) {
            let _ = self.write_fmt(format_args!("Error: {e}"));
            let _ = self.writeln_fmt(format_args!("Error: {e}"));
        }
    } else {
        self.set_output_stdout();
@@ -646,7 +717,7 @@ impl Limbo {
}
Command::Dump => {
    if let Err(e) = self.dump_database() {
        let _ = self.write_fmt(format_args!("/****** ERROR: {e} ******/"));
        let _ = self.writeln_fmt(format_args!("/****** ERROR: {e} ******/"));
    }
}
Command::DbConfig(_args) => {
@@ -695,88 +766,35 @@ impl Limbo {
OutputMode::List => {
    let mut headers_printed = false;
    loop {
        if self.interrupt_count.load(Ordering::Acquire) > 0 {
            println!("Query interrupted.");
            return Ok(());
        }

        let start = Instant::now();

        match rows.step() {
            Ok(StepResult::Row) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }

                // Print headers if enabled and not already printed
                if self.opts.headers && !headers_printed {
                    for i in 0..rows.num_columns() {
                        if i > 0 {
                            let _ = self.write(b"|");
                        }
                        let _ = self.write(rows.get_column_name(i).as_bytes());
                    }
                    let _ = self.writeln("");
                    headers_printed = true;
                }

                let row = rows.row().unwrap();
                for (i, value) in row.get_values().enumerate() {
        row_step_result_query!(self, sql, rows, statistics, {
            // Print headers if enabled and not already printed
            if self.opts.headers && !headers_printed {
                for i in 0..rows.num_columns() {
                    if i > 0 {
                        let _ = self.write(b"|");
                    }
                    if matches!(value, Value::Null) {
                        let bytes = self.opts.null_value.clone();
                        self.write(bytes.as_bytes())?;
                    } else {
                        self.write(format!("{value}").as_bytes())?;
                    }
                    let _ = self.write(rows.get_column_name(i).as_bytes());
                }
                let _ = self.writeln("");
                headers_printed = true;
            }
            Ok(StepResult::IO) => {
                let start = Instant::now();
                rows.run_once()?;
                if let Some(ref mut stats) = statistics {
                    stats.io_time_elapsed_samples.push(start.elapsed());

            let row = rows.row().unwrap();
            for (i, value) in row.get_values().enumerate() {
                if i > 0 {
                    let _ = self.write(b"|");
                }
                if matches!(value, Value::Null) {
                    self.write_null()?;
                } else {
                    write!(self, "{value}")?;
                }
            }
            Ok(StepResult::Interrupt) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Done) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Busy) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let _ = self.writeln("database is busy");
                break;
            }
            Err(err) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let report =
                    miette::Error::from(err).with_source_code(sql.to_owned());
                let _ = self.write_fmt(format_args!("{report:?}"));
                break;
            }
        }
            let _ = self.writeln("");
        });
    }
}
OutputMode::Pretty => {
    if self.interrupt_count.load(Ordering::Acquire) > 0 {
        println!("Query interrupted.");
        return Ok(());
    }
    let config = self.config.as_ref().unwrap();
    let mut table = Table::new();
    table
@@ -795,170 +813,76 @@ impl Limbo {
        table.set_header(header);
    }
    loop {
        let start = Instant::now();
        match rows.step() {
            Ok(StepResult::Row) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let record = rows.row().unwrap();
                let mut row = Row::new();
                row.max_height(1);
                for (idx, value) in record.get_values().enumerate() {
                    let (content, alignment) = match value {
                        Value::Null => {
                            (self.opts.null_value.clone(), CellAlignment::Left)
                        }
                        Value::Integer(_) => {
                            (format!("{value}"), CellAlignment::Right)
                        }
                        Value::Float(_) => {
                            (format!("{value}"), CellAlignment::Right)
                        }
                        Value::Text(_) => (format!("{value}"), CellAlignment::Left),
                        Value::Blob(_) => (format!("{value}"), CellAlignment::Left),
                    };
                    row.add_cell(
                        Cell::new(content)
                            .set_alignment(alignment)
                            .fg(config.table.column_colors
                                [idx % config.table.column_colors.len()]
                            .as_comfy_table_color()),
                    );
                }
                table.add_row(row);
        row_step_result_query!(self, sql, rows, statistics, {
            let record = rows.row().unwrap();
            let mut row = Row::new();
            row.max_height(1);
            for (idx, value) in record.get_values().enumerate() {
                let (content, alignment) = match value {
                    Value::Null => {
                        (self.opts.null_value.clone(), CellAlignment::Left)
                    }
                    Value::Integer(_) => (format!("{value}"), CellAlignment::Right),
                    Value::Float(_) => (format!("{value}"), CellAlignment::Right),
                    Value::Text(_) => (format!("{value}"), CellAlignment::Left),
                    Value::Blob(_) => (format!("{value}"), CellAlignment::Left),
                };
                row.add_cell(
                    Cell::new(content)
                        .set_alignment(alignment)
                        .fg(config.table.column_colors
                            [idx % config.table.column_colors.len()]
                        .as_comfy_table_color()),
                );
            }
            Ok(StepResult::IO) => {
                let start = Instant::now();
                rows.run_once()?;
                if let Some(ref mut stats) = statistics {
                    stats.io_time_elapsed_samples.push(start.elapsed());
                }
            }
            Ok(StepResult::Interrupt) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Done) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Busy) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let _ = self.writeln("database is busy");
                break;
            }
            Err(err) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let report =
                    miette::Error::from(err).with_source_code(sql.to_owned());
                let _ = self.write_fmt(format_args!("{report:?}"));
                break;
            }
        }
            table.add_row(row);
        });
    }

    if !table.is_empty() {
        let _ = self.write_fmt(format_args!("{table}"));
        writeln!(self, "{table}")?;
    }
}
OutputMode::Line => {
    let mut first_row_printed = false;

    let max_width = (0..rows.num_columns())
        .map(|i| rows.get_column_name(i).len())
        .max()
        .unwrap_or(0);

    let formatted_columns: Vec<String> = (0..rows.num_columns())
        .map(|i| format!("{:>width$}", rows.get_column_name(i), width = max_width))
        .collect();

    loop {
        if self.interrupt_count.load(Ordering::Acquire) > 0 {
            println!("Query interrupted.");
            return Ok(());
        }
        row_step_result_query!(self, sql, rows, statistics, {
            let record = rows.row().unwrap();

        let start = Instant::now();
            if !first_row_printed {
                first_row_printed = true;
            } else {
                self.writeln("")?;
            }

        let max_width = (0..rows.num_columns())
            .map(|i| rows.get_column_name(i).len())
            .max()
            .unwrap_or(0);

        let formatted_columns: Vec<String> = (0..rows.num_columns())
            .map(|i| {
                format!("{:>width$}", rows.get_column_name(i), width = max_width)
            })
            .collect();

        match rows.step() {
            Ok(StepResult::Row) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let record = rows.row().unwrap();

                if !first_row_printed {
                    first_row_printed = true;
            for (i, value) in record.get_values().enumerate() {
                self.write(&formatted_columns[i])?;
                self.write(b" = ")?;
                if matches!(value, Value::Null) {
                    self.write_null()?;
                } else {
                    self.writeln("")?;
                }

                for (i, value) in record.get_values().enumerate() {
                    self.write(&formatted_columns[i])?;
                    self.write(b" = ")?;
                    if matches!(value, Value::Null) {
                        let bytes = self.opts.null_value.clone();
                        self.write(bytes.as_bytes())?;
                    } else {
                        self.write(format!("{value}").as_bytes())?;
                    }
                    self.writeln("")?;
                    write!(self, "{value}")?;
                }
                self.writeln("")?;
            }
            Ok(StepResult::IO) => {
                let start = Instant::now();
                rows.run_once()?;
                if let Some(ref mut stats) = statistics {
                    stats.io_time_elapsed_samples.push(start.elapsed());
                }
            }
            Ok(StepResult::Interrupt) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Done) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                break;
            }
            Ok(StepResult::Busy) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let _ = self.writeln("database is busy");
                break;
            }
            Err(err) => {
                if let Some(ref mut stats) = statistics {
                    stats.execute_time_elapsed_samples.push(start.elapsed());
                }
                let report =
                    miette::Error::from(err).with_source_code(sql.to_owned());
                let _ = self.write_fmt(format_args!("{report:?}"));
                break;
            }
        }
        });
    }
}
},
Ok(None) => {}
Err(err) => {
    let report = miette::Error::from(err).with_source_code(sql.to_owned());
    let _ = self.write_fmt(format_args!("{report:?}"));
    let _ = self.writeln_fmt(format_args!("{report:?}"));
    anyhow::bail!("We have to throw here, even if we printed error");
}
}
@@ -1034,13 +958,13 @@ impl Limbo {
        schema_str.to_string()
    }
};
let _ = self.write_fmt(format_args!("{modified_schema};"));
let _ = self.writeln_fmt(format_args!("{modified_schema};"));
// For views, add the column comment like SQLite does
if obj_type.as_str() == "view" {
    let columns = self
        .get_view_columns(obj_name.as_str())
        .unwrap_or_else(|_| "x".to_string());
    let _ = self.write_fmt(format_args!("/* {}({}) */", obj_name.as_str(), columns));
    let _ = self.writeln_fmt(format_args!("/* {}({}) */", obj_name.as_str(), columns));
}
true
} else {
@@ -1180,7 +1104,7 @@ impl Limbo {
    format!("{target_db}.{table_name}")
};
let _ = self
    .write_fmt(format_args!("-- Error: Table '{table_display}' not found."));
    .writeln_fmt(format_args!("-- Error: Table '{table_display}' not found."));
}
}
None => {
@@ -1257,7 +1181,7 @@ impl Limbo {
if !tables.is_empty() {
    let _ = self.writeln(tables.trim_end().as_bytes());
} else if let Some(pattern) = pattern {
    let _ = self.write_fmt(format_args!(
    let _ = self.writeln_fmt(format_args!(
        "Error: Tables with pattern '{pattern}' not found."
    ));
} else {

@@ -1,11 +1,11 @@
use std::{cell::RefCell, result::Result, sync::Arc};

use turso_ext::{ConstraintUsage, ResultCode};
use turso_ext::{ConstraintOp, ConstraintUsage, ResultCode};

use crate::{
    json::{
        convert_dbtype_to_jsonb,
        jsonb::{ArrayIteratorState, Jsonb, ObjectIteratorState},
        convert_dbtype_to_jsonb, json_path_from_db_value,
        jsonb::{ArrayIteratorState, Jsonb, ObjectIteratorState, SearchOperation},
        vtab::columns::Columns,
        Conv,
    },
@@ -46,8 +46,6 @@ impl InternalVirtualTable for JsonEachVirtualTable {
    constraints: &[turso_ext::ConstraintInfo],
    _order_by: &[turso_ext::OrderByInfo],
) -> Result<turso_ext::IndexInfo, ResultCode> {
    use turso_ext::ConstraintOp;

    let mut usages = vec![
        ConstraintUsage {
            argv_index: None,
@@ -55,25 +53,51 @@ impl InternalVirtualTable for JsonEachVirtualTable {
        };
        constraints.len()
    ];
    let mut have_json = false;

    let mut json_idx: Option<usize> = None;
    let mut path_idx: Option<usize> = None;
    for (i, c) in constraints.iter().enumerate() {
        if c.usable && c.op == ConstraintOp::Eq && c.column_index as usize == COL_JSON {
            usages[i] = ConstraintUsage {
                argv_index: Some(1),
                omit: true,
            };
            have_json = true;
            break;
        if !c.usable || c.op != ConstraintOp::Eq {
            continue;
        }
        match c.column_index as usize {
            COL_JSON => json_idx = Some(i),
            COL_ROOT => path_idx = Some(i),
            _ => {}
        }
    }

    let argc = match (json_idx, path_idx) {
        (Some(_), Some(_)) => 2,
        (Some(_), None) => 1,
        _ => 0,
    };

    if argc >= 1 {
        usages[json_idx.unwrap()] = ConstraintUsage {
            argv_index: Some(1),
            omit: true,
        };
    }
    if argc == 2 {
        usages[path_idx.unwrap()] = ConstraintUsage {
            argv_index: Some(2),
            omit: true,
        };
    }

    let (cost, rows) = match argc {
        1 => (1., 25),
        2 => (1., 25),
        _ => (f64::MAX, 25),
    };

    Ok(turso_ext::IndexInfo {
        idx_num: i32::from(have_json),
        idx_num: -1,
        idx_str: None,
        order_by_consumed: false,
        estimated_cost: if have_json { 10.0 } else { 1_000_000.0 },
        estimated_rows: if have_json { 100 } else { u32::MAX },
        estimated_cost: cost,
        estimated_rows: rows,
        constraint_usages: usages,
    })
}
@@ -112,6 +136,7 @@ pub struct JsonEachCursor {
    rowid: i64,
    no_more_rows: bool,
    json: Jsonb,
    root_path: Option<String>,
    iterator_state: IteratorState,
    columns: Columns,
}
@@ -122,6 +147,7 @@ impl Default for JsonEachCursor {
        rowid: 0,
        no_more_rows: false,
        json: Jsonb::new(0, None),
        root_path: None,
        iterator_state: IteratorState::None,
        columns: Columns::default(),
    }
@@ -138,25 +164,31 @@ impl InternalVirtualTableCursor for JsonEachCursor {
    if args.is_empty() {
        return Ok(false);
    }
    if args.len() == 2 {
        return Err(LimboError::InvalidArgument(
            "2-arg json_each is not supported yet".to_owned(),
        ));
    }
    if args.len() != 1 && args.len() != 2 {
        return Err(LimboError::InvalidArgument(
            "json_each accepts 1 or 2 arguments".to_owned(),
        ));
    }

    let db_value = &args[0];
    let mut jsonb = convert_dbtype_to_jsonb(&args[0], Conv::Strict)?;
    if args.len() == 1 {
        self.json = jsonb;
    } else if args.len() == 2 {
        let Value::Text(root_path) = &args[1] else {
            return Err(LimboError::InvalidArgument(
                "root path should be text".to_owned(),
            ));
        };
        self.root_path = Some(root_path.as_str().to_owned());
        self.json = if let Some(json) = navigate_to_path(&mut jsonb, &args[1])? {
            json
        } else {
            return Ok(false);
        };
    }
    let json_element_type = self.json.element_type()?;

    let jsonb = convert_dbtype_to_jsonb(db_value, Conv::Strict)?;

    let element_type = jsonb.element_type()?;
    self.json = jsonb;

    match element_type {
    match json_element_type {
        jsonb::ElementType::ARRAY => {
            let iter = self.json.array_iterator()?;
            self.iterator_state = IteratorState::Array(iter);
@@ -181,7 +213,7 @@ impl InternalVirtualTableCursor for JsonEachCursor {
        jsonb::ElementType::RESERVED1
        | jsonb::ElementType::RESERVED2
        | jsonb::ElementType::RESERVED3 => {
            unreachable!("element type not supported: {element_type:?}");
            unreachable!("element type not supported: {json_element_type:?}");
        }
    };

@@ -201,7 +233,11 @@ impl InternalVirtualTableCursor for JsonEachCursor {
            return Ok(false);
        };
        self.iterator_state = IteratorState::Array(new_state);
        self.columns = Columns::new(columns::Key::Integer(idx as i64), jsonb);
        self.columns = Columns::new(
            columns::Key::Integer(idx as i64),
            jsonb,
            self.root_path.clone(),
        );
    }
    IteratorState::Object(state) => {
        let Some(((_idx, key, value), new_state)): Option<(
@@ -214,11 +250,12 @@ impl InternalVirtualTableCursor for JsonEachCursor {

        self.iterator_state = IteratorState::Object(new_state);
        let key = key.to_string();
        self.columns = Columns::new(columns::Key::String(key), value);
        self.columns =
            Columns::new(columns::Key::String(key), value, self.root_path.clone());
    }
    IteratorState::Primitive => {
        let json = std::mem::replace(&mut self.json, Jsonb::new(0, None));
        self.columns = Columns::new_from_primitive(json);
        self.columns = Columns::new_from_primitive(json, self.root_path.clone());
        self.no_more_rows = true;
    }
    IteratorState::None => unreachable!(),
@@ -247,6 +284,20 @@ impl InternalVirtualTableCursor for JsonEachCursor {
    }
}

fn navigate_to_path(jsonb: &mut Jsonb, path: &Value) -> Result<Option<Jsonb>, LimboError> {
    let json_path = json_path_from_db_value(path, true)?.ok_or_else(|| {
        LimboError::InvalidArgument(format!("path '{path}' is not a valid json path"))
    })?;
    let mut search_operation = SearchOperation::new(jsonb.len() / 2);
    if jsonb
        .operate_on_path(&json_path, &mut search_operation)
        .is_err()
    {
        return Ok(None);
    }
    Ok(Some(search_operation.result()))
}

mod columns {
    use crate::{
        json::{
@@ -262,16 +313,17 @@ mod columns {
    pub(super) enum Key {
        Integer(i64),
        String(String),
        None,
    }

    impl Key {
        fn empty() -> Self {
            Self::Integer(0)
            Self::None
        }

        fn fullkey_representation(&self) -> Value {
        fn fullkey_representation(&self, root_path: &str) -> Value {
            match self {
                Key::Integer(ref i) => Value::Text(Text::new(&format!("$[{i}]"))),
                Key::Integer(ref i) => Value::Text(Text::new(&format!("{root_path}[{i}]"))),
                Key::String(ref text) => {
                    let mut needs_quoting: bool = false;

@@ -283,10 +335,11 @@ mod columns {
                    if needs_quoting {
                        text = format!("\"{text}\"");
                    }
                    let s = format!("$.{text}");
                    let s = format!("{root_path}.{text}");

                    Value::Text(Text::new(&s))
                }
                Key::None => Value::Text(Text::new(root_path)),
            }
        }

@@ -296,6 +349,7 @@ mod columns {
            Key::String(ref s) => Value::Text(Text::new(
                &s[1..s.len() - 1].to_owned().replace("\\\"", "\""),
            )),
            Key::None => Value::Null,
        }
    }
}
@@ -303,7 +357,7 @@ mod columns {
    pub(super) struct Columns {
        key: Key,
        value: Jsonb,
        is_primitive: bool,
        root_path: String,
    }

    impl Default for Columns {
@@ -311,25 +365,25 @@ mod columns {
        Self {
            key: Key::empty(),
            value: Jsonb::new(0, None),
            is_primitive: false,
            root_path: String::new(),
        }
    }
}

impl Columns {
    pub(super) fn new(key: Key, value: Jsonb) -> Self {
    pub(super) fn new(key: Key, value: Jsonb, root_path: Option<String>) -> Self {
        Self {
            key,
            value,
            is_primitive: false,
            root_path: root_path.unwrap_or_else(|| "$".to_owned()),
        }
    }

    pub(super) fn new_from_primitive(value: Jsonb) -> Self {
    pub(super) fn new_from_primitive(value: Jsonb, root_path: Option<String>) -> Self {
        Self {
            key: Key::empty(),
            value,
            is_primitive: true,
            root_path: root_path.unwrap_or_else(|| "$".to_owned()),
        }
    }

@@ -348,9 +402,6 @@ mod columns {
    }

    pub(super) fn key(&self) -> Value {
        if self.is_primitive {
            return Value::Null;
        }
        self.key.key_representation()
    }

@@ -397,14 +448,11 @@ mod columns {
    }

    pub(super) fn fullkey(&self) -> Value {
        if self.is_primitive {
            return Value::Text(Text::new("$"));
        }
        self.key.fullkey_representation()
        self.key.fullkey_representation(&self.root_path)
    }

    pub(super) fn path(&self) -> Value {
        Value::Text(Text::new("$"))
        Value::Text(Text::new(&self.root_path))
    }

    pub(super) fn parent(&self) -> Value {

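With these changes json_each gains the SQLite-style two-argument form: the second argument narrows iteration to a sub-tree, and key, fullkey, and path are reported relative to that root. A hedged sketch of what that enables, written against the core prepare/step API shapes already visible in this patch (the function and its pumping strategy are illustrative):

// Hypothetical driver: iterate only the members of $.items.
fn list_items(conn: &Arc<turso_core::Connection>) -> turso_core::Result<()> {
    let mut stmt = conn.prepare(
        "SELECT key, value, fullkey FROM json_each('{\"items\":[1,2,3]}', '$.items')",
    )?;
    loop {
        match stmt.step()? {
            turso_core::StepResult::Row => {
                // fullkey is now rooted at '$.items', e.g. '$.items[0]'.
            }
            turso_core::StepResult::IO => { /* pump I/O here, e.g. run_once() */ }
            _ => break,
        }
    }
    Ok(())
}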
84 core/lib.rs
@@ -626,6 +626,38 @@ impl Database {
        Ok(pager)
    }

    #[cfg(feature = "fs")]
    pub fn io_for_path(path: &str) -> Result<Arc<dyn IO>> {
        use crate::util::MEMORY_PATH;
        let io: Arc<dyn IO> = match path.trim() {
            MEMORY_PATH => Arc::new(MemoryIO::new()),
            _ => Arc::new(PlatformIO::new()?),
        };
        Ok(io)
    }

    #[cfg(feature = "fs")]
    pub fn io_for_vfs<S: AsRef<str> + std::fmt::Display>(vfs: S) -> Result<Arc<dyn IO>> {
        let vfsmods = ext::add_builtin_vfs_extensions(None)?;
        let io: Arc<dyn IO> = match vfsmods
            .iter()
            .find(|v| v.0 == vfs.as_ref())
            .map(|v| v.1.clone())
        {
            Some(vfs) => vfs,
            None => match vfs.as_ref() {
                "memory" => Arc::new(MemoryIO::new()),
                "syscall" => Arc::new(SyscallIO::new()?),
                #[cfg(all(target_os = "linux", feature = "io_uring"))]
                "io_uring" => Arc::new(UringIO::new()?),
                other => {
                    return Err(LimboError::InvalidArgument(format!("no such VFS: {other}")));
                }
            },
        };
        Ok(io)
    }

    /// Open a new database file with optionally specifying a VFS without an existing database
    /// connection and symbol table to register extensions.
    #[cfg(feature = "fs")]
@@ -639,40 +671,13 @@ impl Database {
    where
        S: AsRef<str> + std::fmt::Display,
    {
        use crate::util::MEMORY_PATH;
        let vfsmods = ext::add_builtin_vfs_extensions(None)?;
        match vfs {
            Some(vfs) => {
                let io: Arc<dyn IO> = match vfsmods
                    .iter()
                    .find(|v| v.0 == vfs.as_ref())
                    .map(|v| v.1.clone())
                {
                    Some(vfs) => vfs,
                    None => match vfs.as_ref() {
                        "memory" => Arc::new(MemoryIO::new()),
                        "syscall" => Arc::new(SyscallIO::new()?),
                        #[cfg(all(target_os = "linux", feature = "io_uring"))]
                        "io_uring" => Arc::new(UringIO::new()?),
                        other => {
                            return Err(LimboError::InvalidArgument(format!(
                                "no such VFS: {other}"
                            )));
                        }
                    },
                };
                let db = Self::open_file_with_flags(io.clone(), path, flags, opts)?;
                Ok((io, db))
            }
            None => {
                let io: Arc<dyn IO> = match path.trim() {
                    MEMORY_PATH => Arc::new(MemoryIO::new()),
                    _ => Arc::new(PlatformIO::new()?),
                };
                let db = Self::open_file_with_flags(io.clone(), path, flags, opts)?;
                Ok((io, db))
            }
        }
        let io = vfs
            .map(|vfs| Self::io_for_vfs(vfs))
            .or_else(|| Some(Self::io_for_path(path)))
            .transpose()?
            .unwrap();
        let db = Self::open_file_with_flags(io.clone(), path, flags, opts)?;
        Ok((io, db))
    }

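The two helpers factor the IO-selection logic out of open_new so it can be reused (notably by ATTACH further down). A hedged sketch of calling them directly, assuming the associated-function shapes shown above behind the "fs" feature (the pick_io wrapper itself is illustrative):

use std::sync::Arc;
use turso_core::{Database, IO};

fn pick_io() -> turso_core::Result<Arc<dyn IO>> {
    // ":memory:" resolves to MemoryIO; any other path gets the platform IO.
    let io = Database::io_for_path(":memory:")?;
    // A named VFS resolves through the built-in VFS extensions first, then
    // falls back to the "memory"/"syscall"/"io_uring" arms, else errors.
    let _uring_or_err = Database::io_for_vfs("io_uring");
    Ok(io)
}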
    #[inline]
@@ -1304,12 +1309,17 @@ impl Connection {
    }

    #[cfg(feature = "fs")]
    fn from_uri_attached(uri: &str, db_opts: DatabaseOpts) -> Result<Arc<Database>> {
    fn from_uri_attached(
        uri: &str,
        db_opts: DatabaseOpts,
        io: Arc<dyn IO>,
    ) -> Result<Arc<Database>> {
        let mut opts = OpenOptions::parse(uri)?;
        // FIXME: for now, only support read-only attach
        opts.mode = OpenMode::ReadOnly;
        let flags = opts.get_flags()?;
        let (_io, db) = Database::open_new(&opts.path, opts.vfs.as_ref(), flags, db_opts)?;
        let io = opts.vfs.map(Database::io_for_vfs).unwrap_or(Ok(io))?;
        let db = Database::open_file_with_flags(io.clone(), &opts.path, flags, db_opts)?;
        if let Some(modeof) = opts.modeof {
            let perms = std::fs::metadata(modeof)?;
            std::fs::set_permissions(&opts.path, perms.permissions())?;
@@ -1852,7 +1862,7 @@ impl Connection {
            .with_indexes(use_indexes)
            .with_views(use_views)
            .with_strict(use_strict);
        let db = Self::from_uri_attached(path, db_opts)?;
        let db = Self::from_uri_attached(path, db_opts, self._db.io.clone())?;
        let pager = Rc::new(db.init_pager(None)?);

        self.attached_databases

@@ -528,20 +528,36 @@ impl PageCache {
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) -> Result<(), CacheError> {
|
||||
for e in self.entries.iter() {
|
||||
if self.map.len() == 0 {
|
||||
// Fast path: nothing to do.
|
||||
self.clock_hand = NULL;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for node in self.map.iter() {
|
||||
let e = &self.entries[node.slot_index];
|
||||
if let Some(ref p) = e.page {
|
||||
if p.is_dirty() {
|
||||
return Err(CacheError::Dirty { pgno: p.get().id });
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut used_slots = Vec::with_capacity(self.map.len());
|
||||
for node in self.map.iter() {
|
||||
used_slots.push(node.slot_index);
|
||||
}
|
||||
// don't touch already-free slots at all.
|
||||
for &i in &used_slots {
|
||||
if let Some(p) = self.entries[i].page.take() {
|
||||
p.clear_loaded();
|
||||
let _ = p.get().contents.take();
|
||||
}
|
||||
self.entries[i].clear_ref();
|
||||
self.entries[i].reset_links();
|
||||
}
|
||||
self.entries.fill(PageCacheEntry::empty());
|
||||
self.map.clear();
|
||||
self.clock_hand = NULL;
|
||||
self.freelist.clear();
|
||||
for i in (0..self.capacity).rev() {
|
||||
self.map = PageHashMap::new(self.capacity);
|
||||
for &i in used_slots.iter().rev() {
|
||||
self.freelist.push(i);
|
||||
}
|
||||
Ok(())
|
||||
@@ -631,16 +647,6 @@ impl PageCache {
|
||||
self.capacity
|
||||
}
|
||||
|
||||
pub fn unset_dirty_all_pages(&mut self) {
|
||||
let entries = &self.entries;
|
||||
for entry in entries.iter() {
|
||||
if entry.page.is_none() {
|
||||
continue;
|
||||
}
|
||||
entry.page.as_ref().unwrap().clear_dirty();
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn verify_cache_integrity(&self) {
|
||||
let map = &self.map;
|
||||
|
||||
@@ -1563,12 +1563,15 @@ impl Pager {
/// of a rollback or in case we want to invalidate page cache after starting a read transaction
/// right after new writes happened which would invalidate current page cache.
pub fn clear_page_cache(&self) {
self.dirty_pages.borrow_mut().clear();
self.page_cache.write().unset_dirty_all_pages();
self.page_cache
.write()
.clear()
.expect("Failed to clear page cache");
let dirty_pages = self.dirty_pages.borrow();
let mut cache = self.page_cache.write();
for page_id in dirty_pages.iter() {
let page_key = PageCacheKey::new(*page_id);
if let Some(page) = cache.get(&page_key).unwrap_or(None) {
page.clear_dirty();
}
}
cache.clear().expect("Failed to clear page cache");
}

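This hunk makes `clear_page_cache` first un-mark the pages it still tracks as dirty and only then clear the cache, which matters because the new `clear()` fails on any dirty entry. A toy model of that ordering (stand-in types, not the real pager):

```rust
use std::collections::HashSet;

struct Page { id: usize, dirty: bool }

struct MiniPager {
    dirty_pages: HashSet<usize>,
    cache: Vec<Page>,
}

impl MiniPager {
    fn clear_page_cache(&mut self) {
        // 1. Clear the dirty flag on every page the pager tracks as dirty.
        for page in self.cache.iter_mut() {
            if self.dirty_pages.contains(&page.id) {
                page.dirty = false;
            }
        }
        // 2. Now a cache-wide clear cannot observe a dirty page.
        assert!(self.cache.iter().all(|p| !p.dirty));
        self.cache.clear();
        self.dirty_pages.clear();
    }
}

fn main() {
    let mut pager = MiniPager {
        dirty_pages: HashSet::from([7]),
        cache: vec![Page { id: 7, dirty: true }],
    };
    pager.clear_page_cache();
    assert!(pager.cache.is_empty() && pager.dirty_pages.is_empty());
}
```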
/// Checkpoint in Truncate mode and delete the WAL file. This method is _only_ to be called
@@ -2118,6 +2121,7 @@ impl Pager {
is_write: bool,
) -> Result<(), LimboError> {
tracing::debug!(schema_did_change);
self.clear_page_cache();
if is_write {
self.dirty_pages.borrow_mut().clear();
} else {
@@ -2126,12 +2130,7 @@ impl Pager {
"dirty pages should be empty for read txn"
);
}
let mut cache = self.page_cache.write();

self.reset_internal_states();

cache.unset_dirty_all_pages();
cache.clear().expect("failed to clear page cache");
if schema_did_change {
connection.schema.replace(connection._db.clone_schema()?);
}

@@ -1622,11 +1622,14 @@ pub fn write_varint_to_vec(value: u64, payload: &mut Vec<u8>) {
payload.extend_from_slice(&varint[0..n]);
}

/// We need to read the WAL file on open to reconstruct the WAL frame cache.
pub fn read_entire_wal_dumb(file: &Arc<dyn File>) -> Result<Arc<RwLock<WalFileShared>>> {
/// Stream through frames in chunks, building frame_cache incrementally
/// Track last valid commit frame for consistency
pub fn build_shared_wal(
file: &Arc<dyn File>,
io: &Arc<dyn crate::IO>,
) -> Result<Arc<RwLock<WalFileShared>>> {
let size = file.size()?;
#[allow(clippy::arc_with_non_send_sync)]
let buf_for_pread = Arc::new(Buffer::new_temporary(size as usize));

let header = Arc::new(SpinLock::new(WalHeader::default()));
let read_locks = std::array::from_fn(|_| TursoRwLock::new());
for (i, l) in read_locks.iter().enumerate() {
@@ -1634,8 +1637,8 @@ pub fn read_entire_wal_dumb(file: &Arc<dyn File>) -> Result<Arc<RwLock<WalFileSh
l.set_value_exclusive(if i < 2 { 0 } else { READMARK_NOT_USED });
l.unlock();
}
#[allow(clippy::arc_with_non_send_sync)]
let wal_file_shared_ret = Arc::new(RwLock::new(WalFileShared {

let wal_file_shared = Arc::new(RwLock::new(WalFileShared {
enabled: AtomicBool::new(true),
wal_header: header.clone(),
min_frame: AtomicU64::new(0),
@@ -1650,205 +1653,306 @@ pub fn read_entire_wal_dumb(file: &Arc<dyn File>) -> Result<Arc<RwLock<WalFileSh
checkpoint_lock: TursoRwLock::new(),
initialized: AtomicBool::new(false),
}));
let wal_file_shared_for_completion = wal_file_shared_ret.clone();
let complete: Box<ReadComplete> = Box::new(move |res: Result<(Arc<Buffer>, i32), _>| {

if size < WAL_HEADER_SIZE as u64 {
wal_file_shared.write().loaded.store(true, Ordering::SeqCst);
return Ok(wal_file_shared);
}

let reader = Arc::new(StreamingWalReader::new(
file.clone(),
wal_file_shared.clone(),
header.clone(),
size,
));

let h = reader.clone().read_header()?;
io.wait_for_completion(h)?;

loop {
if reader.done.load(Ordering::Acquire) {
break;
}
let offset = reader.off_atomic.load(Ordering::Acquire);
if offset >= size {
reader.finalize_loading();
break;
}

let (_read_size, c) = reader.clone().submit_one_chunk(offset)?;
io.wait_for_completion(c)?;

let new_off = reader.off_atomic.load(Ordering::Acquire);
if new_off <= offset {
reader.finalize_loading();
break;
}
}

Ok(wal_file_shared)
}

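The driver loop above submits one chunk at a time and treats lack of forward progress on the shared offset as the signal to finalize, so a truncated or corrupt tail can never spin forever. A self-contained sketch of that pattern, with a fake blocking read standing in for `submit_one_chunk` plus `wait_for_completion`:

```rust
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};

struct Reader {
    off: AtomicU64,
    done: AtomicBool,
    file_size: u64,
}

impl Reader {
    // Stand-in for submit + wait: consume up to `chunk` bytes, advancing
    // the shared offset, and stop advancing at a fake corruption point.
    fn read_chunk_blocking(&self, offset: u64, chunk: u64, corrupt_at: u64) {
        let consumed = chunk
            .min(self.file_size - offset)
            .min(corrupt_at.saturating_sub(offset));
        self.off.fetch_add(consumed, Ordering::AcqRel);
    }
}

fn main() {
    let r = Reader { off: AtomicU64::new(0), done: AtomicBool::new(false), file_size: 100 };
    loop {
        let offset = r.off.load(Ordering::Acquire);
        if r.done.load(Ordering::Acquire) || offset >= r.file_size {
            break;
        }
        r.read_chunk_blocking(offset, 32, 80); // pretend bytes 80.. are garbage
        if r.off.load(Ordering::Acquire) <= offset {
            r.done.store(true, Ordering::Release); // no progress: finalize
            break;
        }
    }
    assert_eq!(r.off.load(Ordering::Acquire), 80);
}
```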
pub(super) struct StreamingWalReader {
file: Arc<dyn File>,
wal_shared: Arc<RwLock<WalFileShared>>,
header: Arc<SpinLock<WalHeader>>,
file_size: u64,
state: RwLock<StreamingState>,
off_atomic: AtomicU64,
page_atomic: AtomicU64,
pub(super) done: AtomicBool,
}

/// Mutable state for streaming reader
struct StreamingState {
frame_idx: u64,
cumulative_checksum: (u32, u32),
last_valid_frame: u64,
pending_frames: HashMap<u64, Vec<u64>>,
page_size: usize,
use_native_endian: bool,
header_valid: bool,
}

impl StreamingWalReader {
fn new(
file: Arc<dyn File>,
wal_shared: Arc<RwLock<WalFileShared>>,
header: Arc<SpinLock<WalHeader>>,
file_size: u64,
) -> Self {
Self {
file,
wal_shared,
header,
file_size,
off_atomic: AtomicU64::new(0),
page_atomic: AtomicU64::new(0),
done: AtomicBool::new(false),
state: RwLock::new(StreamingState {
frame_idx: 1,
cumulative_checksum: (0, 0),
last_valid_frame: 0,
pending_frames: HashMap::new(),
page_size: 0,
use_native_endian: false,
header_valid: false,
}),
}
}

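The `pending_frames` field above is the heart of the consistency story: parsed frames are staged per page and only become visible once a commit frame (one with `db_size > 0`) arrives; whatever is still pending at finalize time is dropped. A toy model of that staging (illustrative names, not the real API):

```rust
use std::collections::HashMap;

struct Staging {
    pending: HashMap<u64, Vec<u64>>, // page -> staged frame numbers
    index: HashMap<u64, Vec<u64>>,   // page -> committed frame numbers
    last_commit: u64,
}

impl Staging {
    fn on_frame(&mut self, page: u64, frame: u64, is_commit: bool) {
        self.pending.entry(page).or_default().push(frame);
        if is_commit {
            // Everything staged so far is covered by this commit.
            self.last_commit = frame;
            for (page, frames) in self.pending.drain() {
                self.index.entry(page).or_default().extend(frames);
            }
        }
    }
}

fn main() {
    let mut s = Staging { pending: HashMap::new(), index: HashMap::new(), last_commit: 0 };
    s.on_frame(1, 1, false);
    s.on_frame(2, 2, true);  // commit: frames 1 and 2 become visible
    s.on_frame(1, 3, false); // never committed: stays pending, later dropped
    assert_eq!(s.index.len(), 2);
    assert_eq!(s.pending.len(), 1);
    assert_eq!(s.last_commit, 2);
}
```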
fn read_header(self: Arc<Self>) -> crate::Result<Completion> {
let header_buf = Arc::new(Buffer::new_temporary(WAL_HEADER_SIZE));
let reader = self.clone();
let completion: Box<ReadComplete> = Box::new(move |res| {
let _reader = reader.clone();
_reader.handle_header_read(res);
});
let c = Completion::new_read(header_buf, completion);
self.file.pread(0, c)
}

fn submit_one_chunk(self: Arc<Self>, offset: u64) -> crate::Result<(usize, Completion)> {
let page_size = self.page_atomic.load(Ordering::Acquire) as usize;
if page_size == 0 {
return Err(crate::LimboError::InternalError(
"page size not initialized".into(),
));
}
let frame_size = WAL_FRAME_HEADER_SIZE + page_size;
if frame_size == 0 {
return Err(crate::LimboError::InternalError(
"invalid frame size".into(),
));
}
const BASE: usize = 16 * 1024 * 1024;
let aligned = (BASE / frame_size) * frame_size;
let read_size = aligned
.max(frame_size)
.min((self.file_size - offset) as usize);
if read_size == 0 {
// end-of-file; let caller finalize
return Ok((0, Completion::new_dummy()));
}

let buf = Arc::new(Buffer::new_temporary(read_size));
let me = self.clone();
let completion: Box<ReadComplete> = Box::new(move |res| {
tracing::debug!("WAL chunk read complete");
let reader = me.clone();
reader.handle_chunk_read(res);
});
let c = Completion::new_read(buf, completion);
let guard = self.file.pread(offset, c)?;
Ok((read_size, guard))
}

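The chunk sizing in `submit_one_chunk` rounds a 16 MiB budget down to a whole number of frames, so no frame ever straddles two reads; it also never reads less than one frame or past end-of-file. A small worked example of that arithmetic (assuming the 24-byte WAL frame header the slicing above implies):

```rust
const WAL_FRAME_HEADER_SIZE: usize = 24;

fn chunk_size(page_size: usize, file_size: u64, offset: u64) -> usize {
    const BASE: usize = 16 * 1024 * 1024;
    let frame_size = WAL_FRAME_HEADER_SIZE + page_size;
    let aligned = (BASE / frame_size) * frame_size; // whole frames only
    aligned.max(frame_size).min((file_size - offset) as usize)
}

fn main() {
    // 4 KiB pages: frame = 4120 bytes, and 4072 frames fit in 16 MiB.
    let full = chunk_size(4096, 1 << 40, 0);
    assert_eq!(full % (4096 + WAL_FRAME_HEADER_SIZE), 0);
    assert_eq!(full, 4072 * 4120);
    // Near EOF the read is clamped to the remaining bytes.
    assert_eq!(chunk_size(4096, 10_000, 9_000), 1_000);
}
```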
fn handle_header_read(self: Arc<Self>, res: Result<(Arc<Buffer>, i32), CompletionError>) {
let Ok((buf, bytes_read)) = res else {
self.finalize_loading();
return;
};
let buf_slice = buf.as_slice();
turso_assert!(
bytes_read == buf_slice.len() as i32,
"read({bytes_read}) != expected({})",
buf_slice.len()
);
let mut header_locked = header.lock();
// Read header
header_locked.magic =
u32::from_be_bytes([buf_slice[0], buf_slice[1], buf_slice[2], buf_slice[3]]);
header_locked.file_format =
u32::from_be_bytes([buf_slice[4], buf_slice[5], buf_slice[6], buf_slice[7]]);
header_locked.page_size =
u32::from_be_bytes([buf_slice[8], buf_slice[9], buf_slice[10], buf_slice[11]]);
header_locked.checkpoint_seq =
u32::from_be_bytes([buf_slice[12], buf_slice[13], buf_slice[14], buf_slice[15]]);
header_locked.salt_1 =
u32::from_be_bytes([buf_slice[16], buf_slice[17], buf_slice[18], buf_slice[19]]);
header_locked.salt_2 =
u32::from_be_bytes([buf_slice[20], buf_slice[21], buf_slice[22], buf_slice[23]]);
header_locked.checksum_1 =
u32::from_be_bytes([buf_slice[24], buf_slice[25], buf_slice[26], buf_slice[27]]);
header_locked.checksum_2 =
u32::from_be_bytes([buf_slice[28], buf_slice[29], buf_slice[30], buf_slice[31]]);
tracing::debug!("read_entire_wal_dumb(header={:?})", *header_locked);

// Read frames into frame_cache and pages_in_frames
if buf_slice.len() < WAL_HEADER_SIZE {
panic!("WAL file too small for header");
if bytes_read != WAL_HEADER_SIZE as i32 {
self.finalize_loading();
return;
}

let use_native_endian_checksum =
cfg!(target_endian = "big") == ((header_locked.magic & 1) != 0);
let (page_sz, c1, c2, use_native, ok) = {
let mut h = self.header.lock();
let s = buf.as_slice();
h.magic = u32::from_be_bytes(s[0..4].try_into().unwrap());
h.file_format = u32::from_be_bytes(s[4..8].try_into().unwrap());
h.page_size = u32::from_be_bytes(s[8..12].try_into().unwrap());
h.checkpoint_seq = u32::from_be_bytes(s[12..16].try_into().unwrap());
h.salt_1 = u32::from_be_bytes(s[16..20].try_into().unwrap());
h.salt_2 = u32::from_be_bytes(s[20..24].try_into().unwrap());
h.checksum_1 = u32::from_be_bytes(s[24..28].try_into().unwrap());
h.checksum_2 = u32::from_be_bytes(s[28..32].try_into().unwrap());
tracing::debug!("WAL header: {:?}", *h);

let calculated_header_checksum = checksum_wal(
&buf_slice[0..24],
&header_locked,
(0, 0),
use_native_endian_checksum,
);

let checksum_header_failed = if calculated_header_checksum
!= (header_locked.checksum_1, header_locked.checksum_2)
let use_native = cfg!(target_endian = "big") == ((h.magic & 1) != 0);
let calc = checksum_wal(&s[0..24], &h, (0, 0), use_native);
(
h.page_size,
h.checksum_1,
h.checksum_2,
use_native,
calc == (h.checksum_1, h.checksum_2),
)
};
if PageSize::new(page_sz).is_none() || !ok {
self.finalize_loading();
return;
}
{
tracing::error!(
"WAL header checksum mismatch. Expected ({}, {}), Got ({}, {}). Ignoring frames starting from frame {}",
header_locked.checksum_1,
header_locked.checksum_2,
calculated_header_checksum.0,
calculated_header_checksum.1,
0
let mut st = self.state.write();
st.page_size = page_sz as usize;
st.use_native_endian = use_native;
st.cumulative_checksum = (c1, c2);
st.header_valid = true;
}
self.off_atomic
.store(WAL_HEADER_SIZE as u64, Ordering::Release);
self.page_atomic.store(page_sz as u64, Ordering::Release);
}

);
true
} else {
false
fn handle_chunk_read(self: Arc<Self>, res: Result<(Arc<Buffer>, i32), CompletionError>) {
let Ok((buf, bytes_read)) = res else {
self.finalize_loading();
return;
};
let buf_slice = &buf.as_slice()[..bytes_read as usize];
// Snapshot salts/endianness once to avoid per-frame header locks
let (header_copy, use_native) = {
let st = self.state.read();
let h = self.header.lock();
(*h, st.use_native_endian)
};

let mut cumulative_checksum = (header_locked.checksum_1, header_locked.checksum_2);
let page_size_u32 = header_locked.page_size;

if PageSize::new(page_size_u32).is_none() {
panic!("Invalid page size in WAL header: {page_size_u32}");
let consumed = self.process_frames(buf_slice, &header_copy, use_native);
self.off_atomic.fetch_add(consumed as u64, Ordering::AcqRel);
// If we didn't consume the full chunk, we hit a stop condition
if consumed < buf_slice.len() || self.off_atomic.load(Ordering::Acquire) >= self.file_size {
self.finalize_loading();
}
let page_size = page_size_u32 as usize;
}

let mut current_offset = WAL_HEADER_SIZE;
let mut frame_idx = 1_u64;
// Processes frames from a buffer, returns bytes processed
fn process_frames(&self, buf: &[u8], header: &WalHeader, use_native: bool) -> usize {
let mut st = self.state.write();
let page_size = st.page_size;
let frame_size = WAL_FRAME_HEADER_SIZE + page_size;
let mut pos = 0;

let mut wfs_data = wal_file_shared_for_completion.write();
while pos + frame_size <= buf.len() {
let fh = &buf[pos..pos + WAL_FRAME_HEADER_SIZE];
let page = &buf[pos + WAL_FRAME_HEADER_SIZE..pos + frame_size];

if !checksum_header_failed {
while current_offset + WAL_FRAME_HEADER_SIZE + page_size <= buf_slice.len() {
let frame_header_slice =
&buf_slice[current_offset..current_offset + WAL_FRAME_HEADER_SIZE];
let page_data_slice = &buf_slice[current_offset + WAL_FRAME_HEADER_SIZE
..current_offset + WAL_FRAME_HEADER_SIZE + page_size];
let page_number = u32::from_be_bytes(fh[0..4].try_into().unwrap());
let db_size = u32::from_be_bytes(fh[4..8].try_into().unwrap());
let s1 = u32::from_be_bytes(fh[8..12].try_into().unwrap());
let s2 = u32::from_be_bytes(fh[12..16].try_into().unwrap());
let c1 = u32::from_be_bytes(fh[16..20].try_into().unwrap());
let c2 = u32::from_be_bytes(fh[20..24].try_into().unwrap());

let frame_h_page_number =
u32::from_be_bytes(frame_header_slice[0..4].try_into().unwrap());
let frame_h_db_size =
u32::from_be_bytes(frame_header_slice[4..8].try_into().unwrap());
let frame_h_salt_1 =
u32::from_be_bytes(frame_header_slice[8..12].try_into().unwrap());
let frame_h_salt_2 =
u32::from_be_bytes(frame_header_slice[12..16].try_into().unwrap());
let frame_h_checksum_1 =
u32::from_be_bytes(frame_header_slice[16..20].try_into().unwrap());
let frame_h_checksum_2 =
u32::from_be_bytes(frame_header_slice[20..24].try_into().unwrap());
if page_number == 0 {
break;
}
if s1 != header.salt_1 || s2 != header.salt_2 {
break;
}

if frame_h_page_number == 0 {
tracing::trace!(
"WAL frame with page number 0. Ignoring frames starting from frame {}",
frame_idx
);
break;
let seed = checksum_wal(&fh[0..8], header, st.cumulative_checksum, use_native);
let calc = checksum_wal(page, header, seed, use_native);
if calc != (c1, c2) {
break;
}

st.cumulative_checksum = calc;
let frame_idx = st.frame_idx;
st.pending_frames
.entry(page_number as u64)
.or_default()
.push(frame_idx);

if db_size > 0 {
st.last_valid_frame = st.frame_idx;
self.flush_pending_frames(&mut st);
}
st.frame_idx += 1;
pos += frame_size;
}
pos
}

fn flush_pending_frames(&self, state: &mut StreamingState) {
if state.pending_frames.is_empty() {
return;
}
let wfs = self.wal_shared.read();
{
let mut frame_cache = wfs.frame_cache.lock();
for (page, mut frames) in state.pending_frames.drain() {
// Only include frames up to last valid commit
frames.retain(|&f| f <= state.last_valid_frame);
if !frames.is_empty() {
frame_cache.entry(page).or_default().extend(frames);
}
// It contains more frames with mismatched SALT values, which means they're leftovers from previous checkpoints
if frame_h_salt_1 != header_locked.salt_1 || frame_h_salt_2 != header_locked.salt_2
{
tracing::trace!(
"WAL frame salt mismatch: expected ({}, {}), got ({}, {}). Ignoring frames starting from frame {}",
header_locked.salt_1,
header_locked.salt_2,
frame_h_salt_1,
frame_h_salt_2,
frame_idx
);
break;
}

let checksum_after_fh_meta = checksum_wal(
&frame_header_slice[0..8],
&header_locked,
cumulative_checksum,
use_native_endian_checksum,
);
let calculated_frame_checksum = checksum_wal(
page_data_slice,
&header_locked,
checksum_after_fh_meta,
use_native_endian_checksum,
);
tracing::debug!(
"read_entire_wal_dumb(frame_h_checksum=({}, {}), calculated_frame_checksum=({}, {}))",
frame_h_checksum_1,
frame_h_checksum_2,
calculated_frame_checksum.0,
calculated_frame_checksum.1
);

if calculated_frame_checksum != (frame_h_checksum_1, frame_h_checksum_2) {
tracing::error!(
"WAL frame checksum mismatch. Expected ({}, {}), Got ({}, {}). Ignoring frames starting from frame {}",
frame_h_checksum_1,
frame_h_checksum_2,
calculated_frame_checksum.0,
calculated_frame_checksum.1,
frame_idx
);
break;
}

cumulative_checksum = calculated_frame_checksum;

wfs_data
.frame_cache
.lock()
.entry(frame_h_page_number as u64)
.or_default()
.push(frame_idx);

let is_commit_record = frame_h_db_size > 0;
if is_commit_record {
wfs_data.max_frame.store(frame_idx, Ordering::SeqCst);
wfs_data.last_checksum = cumulative_checksum;
}

frame_idx += 1;
current_offset += WAL_FRAME_HEADER_SIZE + page_size;
}
}
wfs.max_frame
.store(state.last_valid_frame, Ordering::Release);
}

let max_frame = wfs_data.max_frame.load(Ordering::SeqCst);
/// Finalizes the loading process
fn finalize_loading(&self) {
let mut wfs = self.wal_shared.write();
let st = self.state.read();

// cleanup in-memory index from tail frames which were written after the last committed frame
let mut frame_cache = wfs_data.frame_cache.lock();
for (page, frames) in frame_cache.iter_mut() {
// remove any frame IDs > max_frame
let original_len = frames.len();
frames.retain(|&frame_id| frame_id <= max_frame);
if frames.len() < original_len {
tracing::debug!(
"removed {} frame(s) from page {} from the in-memory WAL index because they were written after the last committed frame {}",
original_len - frames.len(),
page,
max_frame
);
let max_frame = st.last_valid_frame;
if max_frame > 0 {
let mut frame_cache = wfs.frame_cache.lock();
for frames in frame_cache.values_mut() {
frames.retain(|&f| f <= max_frame);
}
frame_cache.retain(|_, frames| !frames.is_empty());
}
// also remove any pages that now have no frames
frame_cache.retain(|_page, frames| !frames.is_empty());

wfs_data.nbackfills.store(0, Ordering::SeqCst);
wfs_data.loaded.store(true, Ordering::SeqCst);
if size >= WAL_HEADER_SIZE as u64 {
wfs_data.initialized.store(true, Ordering::SeqCst);
wfs.max_frame.store(max_frame, Ordering::SeqCst);
wfs.last_checksum = st.cumulative_checksum;
if st.header_valid {
wfs.initialized.store(true, Ordering::SeqCst);
}
});
let c = Completion::new_read(buf_for_pread, complete);
let _c = file.pread(0, c)?;
wfs.nbackfills.store(0, Ordering::SeqCst);
wfs.loaded.store(true, Ordering::SeqCst);

Ok(wal_file_shared_ret)
self.done.store(true, Ordering::Release);
tracing::info!(
"WAL loading complete: {} frames processed, last commit at frame {}",
st.frame_idx - 1,
max_frame
);
}
}

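A key property of the frame verification above is that checksums chain: each frame's expected checksum is seeded with the cumulative value so far, so a single bad frame invalidates the entire tail after it. A toy checksum in the style of SQLite's WAL algorithm to illustrate the chaining (not the real `checksum_wal`):

```rust
fn chain_checksum(data: &[u8], seed: (u32, u32)) -> (u32, u32) {
    let (mut s1, mut s2) = seed;
    // Two running sums over 8-byte blocks, each folding in the other.
    for pair in data.chunks_exact(8) {
        let x0 = u32::from_be_bytes(pair[0..4].try_into().unwrap());
        let x1 = u32::from_be_bytes(pair[4..8].try_into().unwrap());
        s1 = s1.wrapping_add(x0).wrapping_add(s2);
        s2 = s2.wrapping_add(x1).wrapping_add(s1);
    }
    (s1, s2)
}

fn main() {
    let frame_a = [1u8; 16];
    let frame_b = [2u8; 16];
    // Frame B's checksum depends on frame A's: verifying B against the
    // wrong seed fails, which is exactly why a mismatch truncates the tail.
    let after_a = chain_checksum(&frame_a, (0, 0));
    let good_b = chain_checksum(&frame_b, after_a);
    let bad_b = chain_checksum(&frame_b, (0, 0));
    assert_ne!(good_b, bad_b);
}
```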
pub fn begin_read_wal_frame_raw(
@@ -1859,7 +1963,6 @@ pub fn begin_read_wal_frame_raw(
) -> Result<Completion> {
tracing::trace!("begin_read_wal_frame_raw(offset={})", offset);
let buf = Arc::new(buffer_pool.get_wal_frame());
#[allow(clippy::arc_with_non_send_sync)]
let c = Completion::new_read(buf, complete);
let c = io.pread(offset, c)?;
Ok(c)

@@ -553,7 +553,6 @@ impl fmt::Debug for OngoingCheckpoint {
}
}

#[allow(dead_code)]
pub struct WalFile {
io: Arc<dyn IO>,
buffer_pool: Arc<BufferPool>,
@@ -660,7 +659,6 @@ impl fmt::Debug for WalFile {
// TODO(pere): lock only important parts + pin WalFileShared
/// WalFileShared is the part of a WAL that will be shared between threads. A wal has information
/// that needs to be communicated between threads so this struct does the job.
#[allow(dead_code)]
pub struct WalFileShared {
pub enabled: AtomicBool,
pub wal_header: Arc<SpinLock<WalHeader>>,
@@ -676,7 +674,6 @@ pub struct WalFileShared {
pub frame_cache: Arc<SpinLock<HashMap<u64, Vec<u64>>>>,
pub last_checksum: (u32, u32), // Checksum of the last frame in the WAL; this is a cumulative checksum over all frames in the WAL
pub file: Option<Arc<dyn File>>,

/// Read locks advertise the maximum WAL frame a reader may access.
/// Slot 0 is special: when it is held (shared) the reader bypasses the WAL and uses the main DB file.
/// When checkpointing, we must acquire the exclusive read lock 0 to ensure that no readers read
@@ -2238,21 +2235,17 @@ impl WalFileShared {
path: &str,
) -> Result<Arc<RwLock<WalFileShared>>> {
let file = io.open_file(path, crate::io::OpenFlags::Create, false)?;
if file.size()? > 0 {
let wal_file_shared = sqlite3_ondisk::read_entire_wal_dumb(&file)?;
// TODO: Return a completion instead.
let mut max_loops = 100_000;
while !wal_file_shared.read().loaded.load(Ordering::Acquire) {
io.run_once()?;
max_loops -= 1;
if max_loops == 0 {
panic!("WAL file not loaded");
}
}
Ok(wal_file_shared)
} else {
WalFileShared::new_noop()
if file.size()? == 0 {
return WalFileShared::new_noop();
}
let wal_file_shared = sqlite3_ondisk::build_shared_wal(&file, io)?;
turso_assert!(
wal_file_shared
.try_read()
.is_some_and(|wfs| wfs.loaded.load(Ordering::Acquire)),
"Unable to read WAL shared state"
);
Ok(wal_file_shared)
}

pub fn is_initialized(&self) -> Result<bool> {

@@ -190,6 +190,34 @@ fn optimize_table_access(
let maybe_order_target = compute_order_target(order_by, group_by.as_mut());
let constraints_per_table =
constraints_from_where_clause(where_clause, table_references, available_indexes)?;

// Currently the expressions we evaluate as constraints are binary expressions that will never be true for a NULL operand.
// If there are any constraints on the right hand side table of an outer join that are not part of the outer join condition,
// the outer join can be converted into an inner join.
// for example:
// - SELECT * FROM t1 LEFT JOIN t2 ON false WHERE t2.id = 5
// there can never be a situation where null columns are emitted for t2 because t2.id = 5 will never be true in that case.
// hence: we can convert the outer join into an inner join.
for (i, t) in table_references
.joined_tables_mut()
.iter_mut()
.enumerate()
.filter(|(_, t)| {
t.join_info
.as_ref()
.is_some_and(|join_info| join_info.outer)
})
{
if constraints_per_table[i]
.constraints
.iter()
.any(|c| where_clause[c.where_clause_pos.0].from_outer_join.is_none())
{
t.join_info.as_mut().unwrap().outer = false;
continue;
}
}

let Some(best_join_order_result) = compute_best_join_order(
table_references.joined_tables_mut(),
maybe_order_target.as_ref(),

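The comment block above describes why a null-rejecting WHERE constraint on the outer join's right-hand table licenses demoting a LEFT JOIN to an inner join: the NULL-padded rows can never satisfy it, so they are unobservable. A toy version of that test (illustrative field names, not the planner's real structures):

```rust
#[derive(Clone, Copy)]
struct Constraint {
    on_right_table: bool,
    from_outer_join_clause: bool, // did it come from the ON condition itself?
}

fn can_demote_to_inner(constraints: &[Constraint]) -> bool {
    // Any right-table constraint that is *not* part of the ON clause gets
    // evaluated against the padded NULL row and can never be true there.
    constraints
        .iter()
        .any(|c| c.on_right_table && !c.from_outer_join_clause)
}

fn main() {
    // SELECT * FROM t1 LEFT JOIN t2 ON false WHERE t2.id = 5
    let where_t2_id = Constraint { on_right_table: true, from_outer_join_clause: false };
    assert!(can_demote_to_inner(&[where_t2_id]));
    // With only the ON condition, NULL-padded rows remain visible.
    let on_cond = Constraint { on_right_table: true, from_outer_join_clause: true };
    assert!(!can_demote_to_inner(&[on_cond]));
}
```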
@@ -1455,13 +1455,16 @@ pub fn op_column(
index_cursor_id,
table_cursor_id,
} => {
let rowid = {
let Some(rowid) = ({
let index_cursor = state.get_cursor(index_cursor_id);
let index_cursor = index_cursor.as_btree_mut();
return_if_io!(index_cursor.rowid())
}) else {
state.registers[*dest] = Register::Value(Value::Null);
break 'outer;
};
state.op_column_state = OpColumnState::Seek {
rowid: rowid.unwrap(),
rowid,
table_cursor_id,
};
}

@@ -6991,16 +6994,34 @@ pub fn op_open_ephemeral(
let conn = program.connection.clone();
let io = conn.pager.borrow().io.clone();
let rand_num = io.generate_random_number();
let temp_dir = temp_dir();
let rand_path =
std::path::Path::new(&temp_dir).join(format!("tursodb-ephemeral-{rand_num}"));
let Some(rand_path_str) = rand_path.to_str() else {
return Err(LimboError::InternalError(
"Failed to convert path to string".to_string(),
));
};
let file = io.open_file(rand_path_str, OpenFlags::Create, false)?;
let db_file = Arc::new(DatabaseFile::new(file));
let db_file;
let db_file_io: Arc<dyn crate::IO>;

// we support OPFS in WASM - but it requires files to be pre-opened in the browser before use
// we can fix this once the open_file interface is made async
// but for now, for simplicity, we use MemoryIO for all intermediate calculations
#[cfg(target_family = "wasm")]
{
use crate::MemoryIO;

db_file_io = Arc::new(MemoryIO::new());
let file = db_file_io.open_file("temp-file", OpenFlags::Create, false)?;
db_file = Arc::new(DatabaseFile::new(file));
}
#[cfg(not(target_family = "wasm"))]
{
let temp_dir = temp_dir();
let rand_path =
std::path::Path::new(&temp_dir).join(format!("tursodb-ephemeral-{rand_num}"));
let Some(rand_path_str) = rand_path.to_str() else {
return Err(LimboError::InternalError(
"Failed to convert path to string".to_string(),
));
};
let file = io.open_file(rand_path_str, OpenFlags::Create, false)?;
db_file = Arc::new(DatabaseFile::new(file));
db_file_io = io;
}

let page_size = pager
.io
@@ -7013,7 +7034,7 @@ pub fn op_open_ephemeral(
let pager = Rc::new(Pager::new(
db_file,
None,
io,
db_file_io,
page_cache,
buffer_pool.clone(),
Arc::new(AtomicDbState::new(DbState::Uninitialized)),

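The WASM branch above exists because OPFS files must be pre-opened by the browser, so ephemeral tables fall back to in-memory I/O there while native builds use a real temp file. A minimal sketch of the same cfg-gated selection (toy function, not the real `IO` plumbing):

```rust
fn ephemeral_backend() -> &'static str {
    #[cfg(target_family = "wasm")]
    {
        "memory" // avoids the async open that OPFS would require
    }
    #[cfg(not(target_family = "wasm"))]
    {
        "tempfile"
    }
}

fn main() {
    // On a native host this prints "tempfile"; in a wasm build, "memory".
    println!("ephemeral tables use the {} backend", ephemeral_backend());
}
```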
@@ -97,14 +97,46 @@ def wrap_text(text, width=72):
return "\n".join(wrapped_lines)


def check_pr_status(pr_number):
"""Check the status of all checks for a PR

Returns a tuple of (has_failing, has_pending) indicating if there are
any failing or pending checks respectively.
"""
output, error, returncode = run_command(f"gh pr checks {pr_number} --json state,name,startedAt,completedAt")
if returncode != 0:
print(f"Warning: Unable to get PR check status: {error}")
return False, False

checks_data = json.loads(output)
if not checks_data:
return False, False

has_failing = any(check.get("state") == "FAILURE" for check in checks_data)
has_pending = any(
check.get("startedAt") and not check.get("completedAt") or check.get("state") == "IN_PROGRESS"
for check in checks_data
)
return has_failing, has_pending


def merge_remote(pr_number: int, commit_message: str, commit_title: str):
output, error, returncode = run_command(f"gh pr checks {pr_number} --json state")
if returncode == 0:
checks_data = json.loads(output)
if checks_data and any(check.get("state") == "FAILURE" for check in checks_data):
print("Warning: Some checks are failing")
if input("Do you want to proceed with the merge? (y/N): ").strip().lower() != "y":
exit(0)
has_failing, has_pending = check_pr_status(pr_number)

prompt_needed = False
warning_msg = ""

if has_failing:
prompt_needed = True
warning_msg = "Warning: Some checks are failing"
elif has_pending:
prompt_needed = True
warning_msg = "Warning: Some checks are still running"

if prompt_needed:
print(warning_msg)
if input("Do you want to proceed with the merge? (y/N): ").strip().lower() != "y":
exit(0)

# Create a temporary file for the commit message
with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as temp_file:
@@ -131,6 +163,23 @@ def merge_remote(pr_number: int, commit_message: str, commit_title: str):


def merge_local(pr_number: int, commit_message: str):
has_failing, has_pending = check_pr_status(pr_number)

prompt_needed = False
warning_msg = ""

if has_failing:
prompt_needed = True
warning_msg = "Warning: Some checks are failing"
elif has_pending:
prompt_needed = True
warning_msg = "Warning: Some checks are still running"

if prompt_needed:
print(warning_msg)
if input("Do you want to proceed with the merge? (y/N): ").strip().lower() != "y":
exit(0)

current_branch, _, _ = run_command("git branch --show-current")

print(f"Fetching PR #{pr_number}...")

@@ -17,13 +17,20 @@ from pathlib import Path
# Define all npm package paths in one place
NPM_PACKAGES = [
"bindings/javascript",
"bindings/javascript/npm/darwin-universal",
"bindings/javascript/npm/linux-x64-gnu",
"bindings/javascript/npm/win32-x64-msvc",
"bindings/javascript/npm/wasm32-wasip1-threads",
"bindings/javascript/packages/common",
"bindings/javascript/packages/native",
"bindings/javascript/packages/browser",
"sync/javascript",
]

NPM_WORKSPACE_PACKAGES = [
"@tursodatabase/database-common"
]

NPM_WORKSPACES = [
"bindings/javascript"
]


def parse_args():
parser = argparse.ArgumentParser(description="Update version in project files")
@@ -79,6 +86,11 @@ def update_package_json(dir_path, new_version): # noqa: C901

# Update version regardless of current value
package_data["version"] = new_version
if "dependencies" in package_data:
for dependency in package_data["dependencies"].keys():
if dependency not in NPM_WORKSPACE_PACKAGES:
continue
package_data["dependencies"][dependency] = f"^{new_version}"

# Write updated package.json
with open(package_path, "w") as f:
@@ -120,6 +132,25 @@ def update_package_json(dir_path, new_version): # noqa: C901
except Exception:
return False

def run_npm_install(path):
"""Run npm install to update package-lock.json"""
try:
# Run npm install showing its output
print(f"Info: run npm install at path {path}")
subprocess.run(["npm", "install"], check=True, cwd=path)
return True
except Exception:
return False

def run_yarn_install(path):
"""Run yarn install to update yarn.lock"""
try:
# Run yarn install showing its output
print(f"Info: run yarn install at path {path}")
subprocess.run(["yarn", "install"], check=True, cwd=path)
return True
except Exception:
return False

def update_all_packages(new_version):
"""Update all npm packages with the new version."""
@@ -127,6 +158,9 @@ def update_all_packages(new_version):
for package_path in NPM_PACKAGES:
result = update_package_json(package_path, new_version)
results.append((package_path, result))
for workspace_path in NPM_WORKSPACES:
run_npm_install(workspace_path)
run_yarn_install(workspace_path)
return results


@@ -134,6 +168,7 @@ def run_cargo_update():
"""Run cargo update to update the Cargo.lock file."""
try:
# Run cargo update showing its output with verbose flag
print("Info: run cargo update")
subprocess.run(["cargo", "update", "--workspace", "--verbose"], check=True)
return True
except Exception:
@@ -150,11 +185,14 @@ def create_git_commit_and_tag(version):
for package_path in NPM_PACKAGES:
package_json = f"{package_path}/package.json"
package_lock = f"{package_path}/package-lock.json"
yarn_lock = f"{package_path}/yarn.lock"

if os.path.exists(package_json):
files_to_add.append(package_json)
if os.path.exists(package_lock):
files_to_add.append(package_lock)
if os.path.exists(yarn_lock):
files_to_add.append(yarn_lock)

# Add each file individually
for file in files_to_add:

@@ -171,10 +171,16 @@ while (new Date().getTime() - startTime.getTime() < TIME_LIMIT_MINUTES * 60 * 10
args.push('--seed', seed);
// Bugbase wants to have .git available, so we disable it
args.push("--disable-bugbase");

if (Math.random() < 0.5) {
args.push("--profile", "faultless");
}

args.push(...["--minimum-tests", "100", "--maximum-tests", "1000"]);
const loop = args.includes("loop") ? [] : ["loop", "-n", "10", "--short-circuit"]
args.push(...loop);


console.log(`[${timestamp}]: Running "limbo_sim ${args.join(" ")}" - (seed ${seed}, run number ${runNumber})`);
const issuePosted = await run(seed, "limbo_sim", args);

@@ -93,7 +93,7 @@ impl Profile {
},
query: QueryProfile {
create_table_weight: 0,
create_index_weight: 0,
create_index_weight: 4,
..Default::default()
},
..Default::default()

@@ -259,16 +259,16 @@ impl SimplePredicate {
table: &T,
row: &[SimValue],
) -> Self {
// Pick a random column
let columns = table.columns().collect::<Vec<_>>();
let column_index = rng.random_range(0..columns.len());
let column = columns[column_index];
let column_value = &row[column_index];
let table_name = column.table_name;
// Avoid creation of NULLs
if row.is_empty() {
return SimplePredicate(Predicate(Expr::Literal(SimValue::TRUE.into())));
}
// Pick a random column
let columns = table.columns().collect::<Vec<_>>();
let column_index = rng.random_range(0..row.len());
let column = columns[column_index];
let column_value = &row[column_index];
let table_name = column.table_name;

let expr = one_of(
vec![
@@ -317,16 +317,16 @@ impl SimplePredicate {
table: &T,
row: &[SimValue],
) -> Self {
let columns = table.columns().collect::<Vec<_>>();
// Pick a random column
let column_index = rng.random_range(0..columns.len());
let column = columns[column_index];
let column_value = &row[column_index];
let table_name = column.table_name;
// Avoid creation of NULLs
if row.is_empty() {
return SimplePredicate(Predicate(Expr::Literal(SimValue::FALSE.into())));
}
let columns = table.columns().collect::<Vec<_>>();
// Pick a random column
let column_index = rng.random_range(0..row.len());
let column = columns[column_index];
let column_value = &row[column_index];
let table_name = column.table_name;

let expr = one_of(
vec![

@@ -124,12 +124,11 @@ impl SimplePredicate {
pub fn true_unary<R: rand::Rng, C: GenerationContext, T: TableContext>(
rng: &mut R,
context: &C,
table: &T,
_table: &T,
row: &[SimValue],
) -> Self {
let columns = table.columns().collect::<Vec<_>>();
// Pick a random column
let column_index = rng.random_range(0..columns.len());
let column_index = rng.random_range(0..row.len());
let column_value = &row[column_index];
let num_retries = row.len();
// Avoid creation of NULLs
@@ -191,18 +190,17 @@ impl SimplePredicate {
pub fn false_unary<R: rand::Rng, C: GenerationContext, T: TableContext>(
rng: &mut R,
context: &C,
table: &T,
_table: &T,
row: &[SimValue],
) -> Self {
let columns = table.columns().collect::<Vec<_>>();
// Pick a random column
let column_index = rng.random_range(0..columns.len());
let column_value = &row[column_index];
let num_retries = row.len();
// Avoid creation of NULLs
if row.is_empty() {
return SimplePredicate(Predicate(Expr::Literal(SimValue::FALSE.into())));
}
// Pick a random column
let column_index = rng.random_range(0..row.len());
let column_value = &row[column_index];
let num_retries = row.len();
let expr = backtrack(
vec![
// (

@@ -229,7 +229,7 @@ impl FromClause {

let mut join_table = JoinTable {
tables: vec![first_table.clone()],
rows: Vec::new(),
rows: first_table.rows.clone(),
};

for join in &self.joins {

@@ -519,7 +519,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let mut conn = db.lock().await.connect()?;
println!("\rExecuting queries...");
for query_index in 0..nr_iterations {
if gen_bool(0.001) {
if gen_bool(0.001) && false {
if opts.verbose {
println!("Reopening database");
}
@@ -531,7 +531,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
}
*db_guard = builder.build().await?;
conn = db_guard.connect()?;
} else if gen_bool(0.01) {
} else if gen_bool(0.01) && false {
// Reconnect to the database
if opts.verbose {
println!("Reconnecting to database");

@@ -1,6 +1,6 @@
{
"name": "@tursodatabase/sync",
"version": "0.1.5-pre.3",
"version": "0.1.5-pre.5",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"

@@ -329,3 +329,13 @@ do_execsql_test_on_specific_db {:memory:} next-crash {
select a.x, b.x, c.x from a left join b on a.y=b.x left join c on b.y=c.x;
} {1||
2||}

# regression test for crash in op_column
do_execsql_test_on_specific_db {:memory:} left-join-column-crash {
create table a(x int primary key,y);
create table b(x int primary key,y);
insert into a values (1,1),(2,2);
insert into b values (3,3),(4,4);
select * from a left join b on a.x < 2 where a.x < 3 and b.x < 12;
} {1|1|3|3
1|1|4|4}

@@ -1342,3 +1342,82 @@ do_execsql_test json_each_json_extract_on_value {
{k3|[3]}
}

do_execsql_test json-each-2arg-array-basic {
SELECT key, value, type, path, fullkey FROM json_each('{"a":[1,2,3]}', '$.a') ORDER BY key;
} {
{0|1|integer|$.a|$.a[0]}
{1|2|integer|$.a|$.a[1]}
{2|3|integer|$.a|$.a[2]}
}

do_execsql_test json-each-2arg-object-basic {
SELECT key, value, type, path, fullkey
FROM json_each('{"obj":{"a":[1,2],"n":10,"x":"y"}}', '$.obj')
ORDER BY key;
} {
{a|[1,2]|array|$.obj|$.obj.a}
{n|10|integer|$.obj|$.obj.n}
{x|y|text|$.obj|$.obj.x}
}

do_execsql_test json-each-2arg-root-dollar-array {
SELECT key, value, type
FROM json_each('[4,5]', '$')
ORDER BY key;
} {
{0|4|integer}
{1|5|integer}
}

do_execsql_test json-each-2arg-start-at-primitive {
SELECT value, type, path, fullkey FROM json_each('{"a":[1,2,3]}', '$.a[1]');
} {
{2|integer|$.a[1]|$.a[1]}
}

do_execsql_test json-each-2arg-start-at-object-inside-array {
SELECT key, value, type, path, fullkey
FROM json_each('{"arr":[{"x":1},{"y":2}]}', '$.arr[1]');
} {
{y|2|integer|$.arr[1]|$.arr[1].y}
}

do_execsql_test json-each-2arg-nonexistent-path-returns-no-rows {
SELECT count(*) FROM json_each('{"a":1}', '$.missing');
} {{0}}

do_execsql_test json-each-2arg-empty-array {
SELECT count(*) FROM json_each('{"a":[]}', '$.a');
} {{0}}

do_execsql_test json-each-2arg-empty-object {
SELECT count(*) FROM json_each('{"o":{}}', '$.o');
} {{0}}

do_execsql_test json-each-2arg-bools-and-null {
SELECT typeof(value), type
FROM json_each('{"a":[null,true,false]}', '$.a')
ORDER BY key;
} {
{null|null}
{integer|true}
{integer|false}
}

do_execsql_test json-each-2arg-primitive-key-null {
SELECT typeof(key), value, type, path, fullkey
FROM json_each('{"s":"hi"}', '$.s');
} {{null|hi|text|$.s|$.s}}

do_execsql_test json-each-2arg-negative-index-root {
SELECT key, value, type
FROM json_each('[{"a":1},{"b":2},{"c":3}]', '$[#-1]');
} {{c|3|integer}}

do_execsql_test_in_memory_any_error non-string-path {
SELECT * FROM json_each('{}', 123);
}

do_execsql_test_in_memory_any_error invalid-path {
SELECT * FROM json_each('{}', '$$$');
}

@@ -208,6 +208,45 @@ pub(crate) fn limbo_exec_rows(
rows
}

pub(crate) fn limbo_exec_rows_fallible(
_db: &TempDatabase,
conn: &Arc<turso_core::Connection>,
query: &str,
) -> Result<Vec<Vec<rusqlite::types::Value>>, turso_core::LimboError> {
let mut stmt = conn.prepare(query)?;
let mut rows = Vec::new();
'outer: loop {
let row = loop {
let result = stmt.step()?;
match result {
turso_core::StepResult::Row => {
let row = stmt.row().unwrap();
break row;
}
turso_core::StepResult::IO => {
stmt.run_once()?;
continue;
}

turso_core::StepResult::Done => break 'outer,
r => panic!("unexpected result {r:?}: expecting single row"),
}
};
let row = row
.get_values()
.map(|x| match x {
turso_core::Value::Null => rusqlite::types::Value::Null,
turso_core::Value::Integer(x) => rusqlite::types::Value::Integer(*x),
turso_core::Value::Float(x) => rusqlite::types::Value::Real(*x),
turso_core::Value::Text(x) => rusqlite::types::Value::Text(x.as_str().to_string()),
turso_core::Value::Blob(x) => rusqlite::types::Value::Blob(x.to_vec()),
})
.collect();
rows.push(row);
}
Ok(rows)
}

pub(crate) fn limbo_exec_rows_error(
_db: &TempDatabase,
conn: &Arc<turso_core::Connection>,

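The point of the fallible variant above is that the fuzz test below can dump every statement executed so far before failing, making failures reproducible. A self-contained sketch of that reporting pattern, with a toy `exec` standing in for `limbo_exec_rows_fallible`:

```rust
fn exec(query: &str) -> Result<(), String> {
    if query.contains("boom") {
        return Err("simulated engine error".to_string());
    }
    Ok(())
}

fn main() {
    let mut executed: Vec<String> = Vec::new();
    for query in ["CREATE TABLE t (c0 INTEGER)", "INSERT INTO t VALUES (1)", "boom"] {
        executed.push(query.to_string());
        if let Err(e) = exec(query) {
            // Replay script first, then fail: mirrors the fuzz test's
            // "print all the DDL and DML statements" block.
            for q in &executed {
                eprintln!("{q};");
            }
            eprintln!("Error executing query: {e}");
            return;
        }
    }
}
```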
@@ -10,7 +10,10 @@ mod tests {
use rusqlite::{params, types::Value};

use crate::{
common::{limbo_exec_rows, rng_from_time, sqlite_exec_rows, TempDatabase},
common::{
limbo_exec_rows, limbo_exec_rows_fallible, rng_from_time, sqlite_exec_rows,
TempDatabase,
},
fuzz::grammar_generator::{const_str, rand_int, rand_str, GrammarGenerator},
};

@@ -504,6 +507,136 @@ mod tests {
}
}

#[test]
/// Create a table with a random number of columns and indexes, and then randomly update or delete rows from the table.
/// Verify that the results are the same for SQLite and Turso.
pub fn table_index_mutation_fuzz() {
let _ = env_logger::try_init();
let (mut rng, seed) = rng_from_time();
println!("table_index_mutation_fuzz seed: {seed}");

const OUTER_ITERATIONS: usize = 30;
for i in 0..OUTER_ITERATIONS {
println!(
"table_index_mutation_fuzz iteration {}/{}",
i + 1,
OUTER_ITERATIONS
);
let limbo_db = TempDatabase::new_empty(true);
let sqlite_db = TempDatabase::new_empty(true);
let num_cols = rng.random_range(1..=10);
let table_def = (0..num_cols)
.map(|i| format!("c{i} INTEGER"))
.collect::<Vec<_>>();
let table_def = table_def.join(", ");
let table_def = format!("CREATE TABLE t ({table_def})");

let num_indexes = rng.random_range(0..=num_cols);
let indexes = (0..num_indexes)
.map(|i| format!("CREATE INDEX idx_{i} ON t(c{i})"))
.collect::<Vec<_>>();

// Create tables and indexes in both databases
let limbo_conn = limbo_db.connect_limbo();
limbo_exec_rows(&limbo_db, &limbo_conn, &table_def);
for t in indexes.iter() {
limbo_exec_rows(&limbo_db, &limbo_conn, t);
}

let sqlite_conn = rusqlite::Connection::open(sqlite_db.path.clone()).unwrap();
sqlite_conn.execute(&table_def, params![]).unwrap();
for t in indexes.iter() {
sqlite_conn.execute(t, params![]).unwrap();
}

// Generate initial data
let num_inserts = rng.random_range(10..=1000);
let mut tuples = HashSet::new();
while tuples.len() < num_inserts {
tuples.insert(
(0..num_cols)
.map(|_| rng.random_range(0..1000))
.collect::<Vec<_>>(),
);
}
let mut insert_values = Vec::new();
for tuple in tuples {
insert_values.push(format!(
"({})",
tuple
.iter()
.map(|x| x.to_string())
.collect::<Vec<_>>()
.join(", ")
));
}
// Track executed statements in case we fail
let mut dml_statements = Vec::new();
let insert = format!("INSERT INTO t VALUES {}", insert_values.join(", "));
dml_statements.push(insert.clone());

// Insert initial data into both databases
sqlite_conn.execute(&insert, params![]).unwrap();
limbo_exec_rows(&limbo_db, &limbo_conn, &insert);

const COMPARISONS: [&str; 3] = ["=", "<", ">"];
const INNER_ITERATIONS: usize = 100;

for _ in 0..INNER_ITERATIONS {
let do_update = rng.random_range(0..2) == 0;

let comparison = COMPARISONS[rng.random_range(0..COMPARISONS.len())];
let affected_col = rng.random_range(0..num_cols);
let predicate_col = rng.random_range(0..num_cols);
let predicate_value = rng.random_range(0..1000);

let query = if do_update {
let new_y = rng.random_range(0..1000);
format!("UPDATE t SET c{affected_col} = {new_y} WHERE c{predicate_col} {comparison} {predicate_value}")
} else {
format!("DELETE FROM t WHERE c{predicate_col} {comparison} {predicate_value}")
};

dml_statements.push(query.clone());

// Execute on both databases
sqlite_conn.execute(&query, params![]).unwrap();
let limbo_res = limbo_exec_rows_fallible(&limbo_db, &limbo_conn, &query);
if let Err(e) = &limbo_res {
// print all the DDL and DML statements
println!("{table_def};");
for t in indexes.iter() {
println!("{t};");
}
for t in dml_statements.iter() {
println!("{t};");
}
panic!("Error executing query: {e}");
}

// Verify results match exactly
let verify_query = format!(
"SELECT * FROM t ORDER BY {}",
(0..num_cols)
.map(|i| format!("c{i}"))
.collect::<Vec<_>>()
.join(", ")
);
let sqlite_rows = sqlite_exec_rows(&sqlite_conn, &verify_query);
let limbo_rows = limbo_exec_rows(&limbo_db, &limbo_conn, &verify_query);

assert_eq!(
sqlite_rows, limbo_rows,
"Different results after mutation! limbo: {limbo_rows:?}, sqlite: {sqlite_rows:?}, seed: {seed}, query: {query}",
);

if sqlite_rows.is_empty() {
break;
}
}
}
}

#[test]
pub fn compound_select_fuzz() {
let _ = env_logger::try_init();