Mirror of https://github.com/aljazceru/turso.git, synced 2026-02-19 23:15:28 +01:00
Merge remote-tracking branch 'origin/main'
.github/workflows/napi-sync.yml (vendored, 190 lines changed)
@@ -1,190 +0,0 @@
name: Build & publish @tursodatabase/sync

on:
  push:
    branches:
      - main
    tags:
      - v*
  pull_request:
    branches:
      - main

env:
  DEBUG: napi:*
  APP_NAME: turso-sync-js
  MACOSX_DEPLOYMENT_TARGET: "10.13"

defaults:
  run:
    working-directory: sync/javascript

jobs:
  build:
    timeout-minutes: 20
    strategy:
      fail-fast: false
      matrix:
        settings:
          - host: windows-latest
            build: |
              yarn build --target x86_64-pc-windows-msvc
              yarn test
            target: x86_64-pc-windows-msvc
          - host: ubuntu-latest
            target: x86_64-unknown-linux-gnu
            docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
            build: yarn build --target x86_64-unknown-linux-gnu
          - host: macos-latest
            target: aarch64-apple-darwin
            build: yarn build --target aarch64-apple-darwin
          - host: blacksmith-2vcpu-ubuntu-2404-arm
            target: aarch64-unknown-linux-gnu
            build: yarn build --target aarch64-unknown-linux-gnu
          - host: ubuntu-latest
            target: wasm32-wasip1-threads
            setup: |
              rustup target add wasm32-wasip1-threads
              wget https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz
              tar -xvf wasi-sdk-25.0-x86_64-linux.tar.gz
            build: |
              export WASI_SDK_PATH="$(pwd)/wasi-sdk-25.0-x86_64-linux"
              export CMAKE_BUILD_PARALLEL_LEVEL=$(nproc)
              export TARGET_CXXFLAGS="--target=wasm32-wasi-threads --sysroot=$(pwd)/wasi-sdk-25.0-x86_64-linux/share/wasi-sysroot -pthread -mllvm -wasm-enable-sjlj -lsetjmp"
              export TARGET_CFLAGS="$TARGET_CXXFLAGS"
              yarn build --target wasm32-wasip1-threads
    name: stable - ${{ matrix.settings.target }} - node@20
    runs-on: ${{ matrix.settings.host }}
    steps:
      - uses: actions/checkout@v4
      - name: Setup node
        uses: actions/setup-node@v4
        if: ${{ !matrix.settings.docker }}
        with:
          node-version: 20
      - name: Install
        uses: dtolnay/rust-toolchain@stable
        if: ${{ !matrix.settings.docker }}
        with:
          toolchain: stable
          targets: ${{ matrix.settings.target }}
      - name: Cache cargo
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            .cargo-cache
            target/
          key: ${{ matrix.settings.target }}-cargo-${{ matrix.settings.host }}
      - uses: mlugg/setup-zig@v2
        if: ${{ matrix.settings.target == 'armv7-unknown-linux-gnueabihf' || matrix.settings.target == 'armv7-unknown-linux-musleabihf' }}
        with:
          version: 0.13.0
      - name: Setup toolchain
        run: ${{ matrix.settings.setup }}
        if: ${{ matrix.settings.setup }}
        shell: bash
      - name: Install dependencies
        run: yarn install
      - name: Setup node x86
        uses: actions/setup-node@v4
        if: matrix.settings.target == 'x86_64-pc-windows-msvc'
        with:
          node-version: 20
          architecture: x64
      - name: Build in docker
        uses: addnab/docker-run-action@v3
        if: ${{ matrix.settings.docker }}
        with:
          image: ${{ matrix.settings.docker }}
          options: "--user 0:0 -v ${{ github.workspace }}/.cargo-cache/git/db:/usr/local/cargo/git/db -v ${{ github.workspace }}/.cargo/registry/cache:/usr/local/cargo/registry/cache -v ${{ github.workspace }}/.cargo/registry/index:/usr/local/cargo/registry/index -v ${{ github.workspace }}:/build -w /build/sync/javascript"
          run: ${{ matrix.settings.build }}
      - name: Build
        run: ${{ matrix.settings.build }}
        if: ${{ !matrix.settings.docker }}
        shell: bash
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: turso-sync-js-${{ matrix.settings.target }}
          path: |
            sync/javascript/${{ env.APP_NAME }}.*.node
            sync/javascript/${{ env.APP_NAME }}.*.wasm
          if-no-files-found: error
  test-linux-x64-gnu-binding:
    name: Test turso-sync-js on Linux-x64-gnu - node@${{ matrix.node }}
    needs:
      - build
    strategy:
      fail-fast: false
      matrix:
        node:
          - "20"
    runs-on: blacksmith-4vcpu-ubuntu-2404
    steps:
      - uses: actions/checkout@v4
      - name: Setup node
        uses: useblacksmith/setup-node@v5
        with:
          node-version: ${{ matrix.node }}
      - name: Install dependencies
        run: yarn install
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: turso-sync-js-x86_64-unknown-linux-gnu
          path: sync/javascript
      - name: List packages
        run: ls -R .
        shell: bash
      - name: Test turso-sync-js
        run: docker run --rm -v $(pwd):/build -w /build node:${{ matrix.node }}-slim yarn test
  publish:
    name: Publish
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    permissions:
      contents: read
      id-token: write
    needs:
      - test-linux-x64-gnu-binding
    steps:
      - uses: actions/checkout@v4
      - name: Setup node
        uses: useblacksmith/setup-node@v5
        with:
          node-version: 20
      - name: Install dependencies
        run: yarn install
      - name: create npm dirs
        run: yarn napi create-npm-dirs
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: sync/javascript/artifacts
      - name: Move artifacts
        run: yarn artifacts
      - name: List packages
        run: ls -R ./npm
        shell: bash
      - name: Publish
        run: |
          npm config set provenance true
          if git log -1 --pretty=%B | grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+$";
          then
            echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
            make publish-native
            make publish-browser
          elif git log -1 --pretty=%B | grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+";
          then
            echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
            make publish-native-next
            make publish-browser-next
          else
            echo "Not a release, skipping publish"
          fi
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
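The Publish step in the deleted workflow gates on the last commit message: an exact "Turso X.Y.Z" line publishes to the stable npm channel, a "Turso X.Y.Z" prefix followed by anything else (e.g. a pre-release suffix) publishes the -next variants, and any other message skips publishing. A minimal TypeScript sketch of that gating; the function name and return values are illustrative, not part of the repository:

// Illustrative sketch (not from the repo) of the Publish step's
// commit-message gating. An exact "Turso X.Y.Z" line selects the stable
// channel; a "Turso X.Y.Z..." prefix selects "next"; anything else skips.
function releaseChannel(lastCommitMessage: string): "stable" | "next" | "skip" {
  const exact = /^Turso \d+\.\d+\.\d+$/m;  // grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+$"
  const prefix = /^Turso \d+\.\d+\.\d+/m;  // grep "^Turso [0-9]\+\.[0-9]\+\.[0-9]\+"
  if (exact.test(lastCommitMessage)) return "stable";
  if (prefix.test(lastCommitMessage)) return "next";
  return "skip";
}

console.log(releaseChannel("Turso 0.2.0"));         // "stable"
console.log(releaseChannel("Turso 0.2.0-pre.1"));   // "next"
console.log(releaseChannel("Merge branch 'main'")); // "skip"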
.github/workflows/napi.yml (vendored, 68 lines changed)
@@ -32,19 +32,41 @@ jobs:
        settings:
          - host: windows-latest
            target: x86_64-pc-windows-msvc
            artifact: db-bindings-x86_64-pc-windows-msvc
            build: yarn workspace @tursodatabase/database napi-build --target x86_64-pc-windows-msvc
          - host: windows-latest
            target: x86_64-pc-windows-msvc
            artifact: sync-bindings-x86_64-pc-windows-msvc
            build: yarn workspace @tursodatabase/sync napi-build --target x86_64-pc-windows-msvc
          - host: ubuntu-latest
            target: x86_64-unknown-linux-gnu
            artifact: db-bindings-x86_64-unknown-linux-gnu
            docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
            build: yarn workspace @tursodatabase/database napi-build --target x86_64-unknown-linux-gnu
          - host: ubuntu-latest
            target: x86_64-unknown-linux-gnu
            artifact: sync-bindings-x86_64-unknown-linux-gnu
            docker: ghcr.io/napi-rs/napi-rs/nodejs-rust:lts-debian
            build: yarn workspace @tursodatabase/sync napi-build --target x86_64-unknown-linux-gnu
          - host: macos-latest
            target: aarch64-apple-darwin
            artifact: db-bindings-aarch64-apple-darwin
            build: yarn workspace @tursodatabase/database napi-build --target aarch64-apple-darwin
          - host: macos-latest
            target: aarch64-apple-darwin
            artifact: sync-bindings-aarch64-apple-darwin
            build: yarn workspace @tursodatabase/sync napi-build --target aarch64-apple-darwin
          - host: blacksmith-2vcpu-ubuntu-2404-arm
            target: aarch64-unknown-linux-gnu
            artifact: db-bindings-aarch64-unknown-linux-gnu
            build: yarn workspace @tursodatabase/database napi-build --target aarch64-unknown-linux-gnu
          - host: blacksmith-2vcpu-ubuntu-2404-arm
            target: aarch64-unknown-linux-gnu
            artifact: sync-bindings-aarch64-unknown-linux-gnu
            build: yarn workspace @tursodatabase/sync napi-build --target aarch64-unknown-linux-gnu
          - host: ubuntu-latest
            target: wasm32-wasip1-threads
            artifact: db-bindings-wasm32-wasip1-threads
            setup: |
              rustup target add wasm32-wasip1-threads
              wget https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz
@@ -55,6 +77,19 @@ jobs:
              export TARGET_CXXFLAGS="--target=wasm32-wasi-threads --sysroot=$(pwd)/wasi-sdk-25.0-x86_64-linux/share/wasi-sysroot -pthread -mllvm -wasm-enable-sjlj -lsetjmp"
              export TARGET_CFLAGS="$TARGET_CXXFLAGS"
              yarn workspace @tursodatabase/database-browser build
          - host: ubuntu-latest
            target: wasm32-wasip1-threads
            artifact: sync-bindings-wasm32-wasip1-threads
            setup: |
              rustup target add wasm32-wasip1-threads
              wget https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz
              tar -xvf wasi-sdk-25.0-x86_64-linux.tar.gz
            build: |
              export WASI_SDK_PATH="$(pwd)/wasi-sdk-25.0-x86_64-linux"
              export CMAKE_BUILD_PARALLEL_LEVEL=$(nproc)
              export TARGET_CXXFLAGS="--target=wasm32-wasi-threads --sysroot=$(pwd)/wasi-sdk-25.0-x86_64-linux/share/wasi-sysroot -pthread -mllvm -wasm-enable-sjlj -lsetjmp"
              export TARGET_CFLAGS="$TARGET_CXXFLAGS"
              yarn workspace @tursodatabase/sync-browser build
    name: stable - ${{ matrix.settings.target }} - node@20
    runs-on: ${{ matrix.settings.host }}
    steps:
@@ -112,13 +147,15 @@ jobs:
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: bindings-${{ matrix.settings.target }}
          name: ${{ matrix.settings.artifact }}
          path: |
            bindings/javascript/packages/native/${{ env.APP_NAME }}.*.node
            bindings/javascript/packages/browser/${{ env.APP_NAME }}.*.wasm
            bindings/javascript/packages/native/turso.*.node
            bindings/javascript/packages/browser/turso.*.wasm
            bindings/javascript/sync/packages/native/sync.*.node
            bindings/javascript/sync/packages/browser/sync.*.wasm
          if-no-files-found: error
  test-linux-x64-gnu-binding:
    name: Test bindings on Linux-x64-gnu - node@${{ matrix.node }}
  test-db-linux-x64-gnu-binding:
    name: Test DB bindings on Linux-x64-gnu - node@${{ matrix.node }}
    needs:
      - build
    strategy:
@@ -137,11 +174,12 @@ jobs:
        run: yarn install
      - name: Build common
        run: yarn workspace @tursodatabase/database-common build
      - name: Download all artifacts
      - name: Download all DB artifacts
        uses: actions/download-artifact@v4
        with:
          path: bindings/javascript/packages
          path: bindings/javascript
          merge-multiple: true
          pattern: 'db*'
      - name: List packages
        run: ls -R .
        shell: bash
@@ -154,18 +192,28 @@ jobs:
      contents: read
      id-token: write
    needs:
      - test-linux-x64-gnu-binding
      - test-db-linux-x64-gnu-binding
    steps:
      - uses: actions/checkout@v4
      - name: Setup node
        uses: useblacksmith/setup-node@v5
        with:
          node-version: 20
      - name: Download all artifacts
      - name: Download all DB artifacts
        uses: actions/download-artifact@v4
        with:
          path: bindings/javascript/packages
          path: bindings/javascript
          merge-multiple: true
          pattern: 'db*'
      - name: Download all sync artifacts
        uses: actions/download-artifact@v4
        with:
          path: bindings/javascript
          merge-multiple: true
          pattern: 'sync*'
      - name: List packages
        run: ls -R .
        shell: bash
      - name: Install dependencies
        run: yarn install
      - name: Install dependencies
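The matrix now uploads per-component artifacts (db-bindings-* and sync-bindings-*) instead of a single bindings-* bundle, so the later download steps can select by prefix with pattern: 'db*' and pattern: 'sync*'. A toy TypeScript sketch of that prefix selection; the artifact names come from the matrix above, the helper itself is illustrative:

// Illustrative: how the 'db*' / 'sync*' download patterns partition
// the artifact names produced by the build matrix (simple prefix match).
const artifacts = [
  "db-bindings-x86_64-unknown-linux-gnu",
  "sync-bindings-x86_64-unknown-linux-gnu",
  "db-bindings-aarch64-apple-darwin",
  "sync-bindings-aarch64-apple-darwin",
];
const matches = (pattern: string): string[] =>
  artifacts.filter((a) => a.startsWith(pattern.replace("*", "")));
console.log(matches("db*"));   // only the db-bindings-* artifacts
console.log(matches("sync*")); // only the sync-bindings-* artifacts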
.gitignore (vendored, 1 line changed)
@@ -44,3 +44,4 @@ profile.json.gz
simulator-output/

&1
bisected.sql
CHANGELOG.md (180 lines changed)
@@ -1,5 +1,185 @@
# Changelog

## 0.1.5 -- 2025-09-10

### Added

* add missing module type for browser package (Nikita Sivukhin)
* Implement 2-args json_each (Mikaël Francoeur)
* Add OPFS support to JavaScript bindings (Nikita Sivukhin)
* test/fuzz: add UPDATE/DELETE fuzz test (Jussi Saurio)
* add gen-bigass-database.py (Jussi Saurio)
* Add assertion: we read a page with the correct id (Jussi Saurio)
* support float without fractional part (Lâm Hoàng Phúc)
* expr: use more efficient implementation for binary condition exprs (Jussi Saurio)
* Add json_each table-valued function (1-arg only) (Mikaël Francoeur)
* Add io_uring support to stress (Pekka Enberg)
* Refactor LIMIT/OFFSET handling to support expressions (bit-aloo)
* Encryption: add support for other AEGIS and AES-GCM cipher variants (Frank Denis)
* introduce package.json for separate *-browser package (both database and sync) (Nikita Sivukhin)
* introduce `eq/contains/starts_with/ends_with_ignore_ascii_case` macros (Lâm Hoàng Phúc)
* introduce `match_ignore_ascii_case` macro (Lâm Hoàng Phúc)
* core: Make strict schema support experimental (Pekka Enberg)
* core/printf: support for more basic substitution types (Luiz Gustavo)
* Return sqlite_version() without being initialized (Preston Thorpe)
* Support encryption for raw WAL frames (Gaurav Sarma)
* bindings/java: Implement date, time related methods under JDBC4PreparedStatement (Kim Seon Woo)
* Support cipher and encryption key URI options (William Souza)
* Implement UPSERT (Preston Thorpe)
* CLI: implement `Line` output .mode (Andrey Oskin)
* add sqlite integrity check back (Pedro Muniz)
* core: Initial pass on synchronous pragma (Pekka Enberg)
* Introduce and propagate `IOContext` as required (Avinash Sajjanshetty)
* Add some docs on encryption (Avinash Sajjanshetty)
* sqlite3: Implement sqlite3_malloc() and sqlite3_free() (Pekka Enberg)
* sqlite3: Implement sqlite3_next_stmt() (Pekka Enberg)
* core/translate: Add support (Pekka Enberg)
* sqlite3: Implement sqlite3_db_filename() (Pekka Enberg)
* flake.nix: add uv dependency to nativeBuildInputs (Ceferino Patino)
* sqlite3: Implement sqlite3_bind_parameter_index() (Pekka Enberg)
* sqlite3: Implement sqlite3_clear_bindings() (Pekka Enberg)
* sqlite3: Implement sqlite3_get_autocommit() (Pekka Enberg)
* Add support for AEGIS encryption algorithm (Avinash Sajjanshetty)
* bindings/java: Implement batch operations for JDBC4Statement (Kim Seon Woo)
* Add syntax highlighting for EXPLAIN and ANALYZE (Alex Miller)
* Add basic support for ANALYZE statement (Alex Miller)
* correctly implement offset() in parser (Lâm Hoàng Phúc)
* Switch to new parser in core (Levy A.)
* github: Remove Intel Mac support (Pekka Enberg)
* add remove_file method to the IO (Nikita Sivukhin)
* Add libc fault injection to Antithesis (Pekka Enberg)
* core/mvcc: support for MVCC (Pere Diaz Bou)
* SQLite C API improvements: add column type and column decltype (Danawan Bimantoro)
* Initial pass to support per page encryption (Avinash Sajjanshetty)

### Updated

* clean `print_query_result` (Lâm Hoàng Phúc)
* update update-script to properly handle JS workspace (Nikita Sivukhin)
* no need `QueryStatistics` if `self.opts.timer` is not set (Lâm Hoàng Phúc)
* optimizer: convert outer join to inner join if possible (Jussi Saurio)
* Handle case where null flag is set in op_column (Jussi Saurio)
* remove &1 (Lâm Hoàng Phúc)
* reduce cloning `Arc<Page>` (Lâm Hoàng Phúc)
* Evaluate left join seek key condition again after null row (Jussi Saurio)
* use mlugg/setup-zig instead of unmaintained action (Kingsword)
* Prevent setting of encryption keys if already set (Gaurav Sarma)
* Remove RefCell from Cursor (Pedro Muniz)
* Page Cache: optimize and use sieve/Gclock hybrid algorithm in place of LRU (Preston Thorpe)
* core: handle edge cases for read_varint (Sonny)
* Persistence for DBSP-based materialized views (Glauber Costa)
* io_uring: prevent out of order operations that could interfere with durability (Preston Thorpe)
* core: Simplify WalFileShared life cycle (Pekka Enberg)
* prevent modification to system tables. (Glauber Costa)
* mark completion as done only after callback will be executed (Nikita Sivukhin)
* core/mvcc: make commit_txn return on I/O (Pere Diaz Bou)
* windows iterator returns no values for shorter slice (Lâm Hoàng Phúc)
* Unify resolution of aggregate functions (Piotr Rżysko)
* replace some matches with `match_ignore_ascii_case` macro (Lâm Hoàng Phúc)
* Make io_uring sound for connections on multiple threads (Preston Thorpe)
* build native package for ARM64 (Nikita Sivukhin)
* refactor parser fmt (Lâm Hoàng Phúc)
* string sometimes used as identifier quoting (Lâm Hoàng Phúc)
* CURRENT_TIMESTAMP can fallback TK_ID (Lâm Hoàng Phúc)
* remove `turso_sqlite3_parser` from `turso_parser` (Lâm Hoàng Phúc)
* Simulate I/O in memory (Pedro)
* Simulate I/O in memory (Pedro Muniz)
* Refactor encryption to manage authentication tag internally (bit-aloo)
* Unify handling of grouped and ungrouped aggregations (Piotr Rżysko)
* Evict page from cache if page is unlocked and unloaded (Pedro Muniz)
* Use u64 for file offsets in I/O and calculate such offsets in u64 (Preston Thorpe)
* Document how to use CDC (Pavan Nambi)
* Upgrade Rust version in simulator build Dockerfile (Preston Thorpe)
* Parse booleans to integer literals in expressions (Preston Thorpe)
* Simulator Profiles (Pedro Muniz)
* Change views to use DBSP circuits (Glauber Costa)
* core/wal: cache file size (Pere Diaz Bou)
* Remove some code duplication in the CLI (Preston Thorpe)
* core/translate: parse_table remove unnecessary clone of table name (Pere Diaz Bou)
* Update COMPAT.md to remove CREATE INDEX default disabled (Preston Thorpe)
* core/translate: remove unnecessary agg clones (Pere Diaz Bou)
* core/vdbe: Micro-optimize "zero_or_null" opcode (Pekka Enberg)
* translate: with_capacity insns (Pere Diaz Bou)
* perf: avoid constructing PageType in helper methods (Jussi Saurio)
* refactor/perf: remove BTreePageInner (Jussi Saurio)
* Improve integrity check (Nikita Sivukhin)
* translate/insert: Improve string format performance (Pere Diaz Bou)
* core/schema: get_dependent_materialized_views_unnormalized (Pere Diaz Bou)
* core/util: emit literal, cow instead of replace (Pere Diaz Bou)
* core/translate: sanize_string fast path improvement (Pere Diaz Bou)
* core/io: Switch Unix I/O to use libc::pwrite() (Pekka Enberg)
* Update README.md for Go documentation (Preston Thorpe)
* improve sync engine (Nikita Sivukhin)
* Remove Go bindings (Preston Thorpe)
* core/storage: Micro-optimize Pager::commit_dirty_pages() (Pekka Enberg)
* Rename Go driver to `turso` to not conflict with sqlite3 (Preston Thorpe)
* Refactor: `Cell` instead of `RefCell` to store `CipherMode` in connection (Avinash Sajjanshetty)
* Improve documentation of page pinning (Jussi Saurio)
* Remove double indirection in the Parser (Pedro Muniz)
* Fail CI run if Turso output differs from SQLite in TPC-H queries (Jussi Saurio)
* Decouple SQL generation from Simulator crate (Pedro Muniz)
* Make fill_cell_payload() safe for async IO and cache spilling (Jussi Saurio)
* Remove Windows IO in place of Generic IO (Preston Thorpe)
* Improve encryption API (Avinash Sajjanshetty)
* Remove double indirection in the Parser (Pedro Muniz)
* Update TPC-H running instructions in PERF.md (Alex Miller)
* Truncate the WAL on last connection close (Preston Thorpe)
* DBSP projection (Pekka Enberg)
* Use vectored I/O for appending WAL frames (Preston Thorpe)
* Remove unnecessary argument from Pager::end_tx() (Nikita Sivukhin)
* refactor/btree: rewrite the find_free_cell() function (Jussi Saurio)
* refactor/btree: rewrite the free_cell_range() function (Jussi Saurio)
* Remove Result from signature (Mikaël Francoeur)
* Remove duplicated attribute in (bit-aloo)
* reduce cloning Token in parser (Lâm Hoàng Phúc)
* refactor encryption module and make it configurable (Avinash Sajjanshetty)
* Replace a couple refcells for types that trivially impl Copy (Preston Thorpe)
* wal-api: allow to mix frames insert with SQL execution (Nikita Sivukhin)
* move check code into parser (Lâm Hoàng Phúc)
* Serialize compat tests and use Mutex::lock() instead of Mutex::try_lock() in UnixIO (Jussi Saurio)
* sim: remove "run_once faults" (Jussi Saurio)
* should not return a Completion when there is a page cache hit (Pedro Muniz)
* github: Reduce Python build matrix (Pekka Enberg)
* Page cache truncate (Nikita Sivukhin)
* Wal api checkpoint seq (Nikita Sivukhin)
* Use more structured approach in translate_insert (Jussi Saurio)
* Remove hardcoded flag usage in DBHeader for encryption (Avinash Sajjanshetty)
* properly execute pragmas - they may require some IO (Nikita Sivukhin)
* Wal checkpoint upper bound (Nikita Sivukhin)
* Improve WAL checkpointing performance (Preston Thorpe)
* core/mvcc: store txid in conn and reset transaction state on commit (Pere Diaz Bou)
* core/mvcc: start first rowid at 1 (Pere Diaz Bou)
* refactor/vdbe: move insert-related seeking to VDBE from BTreeCursor (Jussi Saurio)

### Fixed

* Fix clear_page_cache method and rollback (Preston Thorpe)
* Fix read_entire_wal_dumb: incrementally build the frame cache (Preston Thorpe)
* Fix merge script to prompt if tests are still in progress (Preston Thorpe)
* SQL generation fixes (Pekka Enberg)
* Fix affinity handling in MakeRecord (Pekka Enberg)
* Fix infinite loop when IO failure happens on allocating first page (Preston Thorpe)
* Fix crash in Next opcode if cursor stack has no pages (Jussi Saurio)
* cli: Fix dump compatibility in "PRAGMA foreign_keys" (Pekka Enberg)
* Small fixes (Nikita Sivukhin)
* Avoid allocating and then immediately fallbacking errors in affinity (Jussi Saurio)
* Fix float formatting and comparison + Blob concat (Levy A.)
* Fix infinite loop when query starts comment token ("--") (Lâm Hoàng Phúc)
* Fix sqlite3 test cases (Pekka Enberg)
* Fix non-determinism in simulator (Pedro Muniz)
* Fix column count in ImmutableRow (Glauber Costa)
* Fix memory leak in page cache during balancing (Preston Thorpe)
* Fix `sim-schema` command (Pedro Muniz)
* Propagate decryption error from the callback (Avinash Sajjanshetty)
* Fix sorter column deduplication (Piotr Rżysko)
* Fix missing functions after revert (Pedro Muniz)
* ci: fix merge-pr issue to escape command-line backticks (Ceferino Patino)
* Fix several issues with integrity_check (Jussi Saurio)
* core/io: Fix build on Android and iOS (Pekka Enberg)
* WAL txn: fix reads from DB file (Nikita Sivukhin)
* Fix blob type handling in JavaScript (Pekka Enberg)
* Fix: all indexes need to be updated if the rowid changes (Jussi Saurio)
* Fix: in UPDATE, insert rowid into index instead of NULL (Jussi Saurio)
* Fix: normalize table name in DELETE (Jussi Saurio)

## 0.1.4 -- 2025-08-20

### Added

@@ -343,7 +343,7 @@ Modifiers:
| TimeOffset | Yes | |
| DateOffset | Yes | |
| DateTimeOffset | Yes | |
| Ceiling | No | |
| Ceiling | Yes | |
| Floor | No | |
| StartOfMonth | Yes | |
| StartOfYear | Yes | |
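Many of the listed additions are SQL-surface features. As one illustration, here is a hedged sketch of the new two-argument json_each through the JavaScript binding; it assumes the connect/prepare/all API that the browser example elsewhere in this commit references, and an ":memory:" path, neither of which the changelog itself specifies:

// Hypothetical usage sketch of json_each (1-arg form and the newly added
// 2-arg form). The connect/prepare/all calls mirror the API referenced by
// the browser example in this commit; ":memory:" is an assumption.
import { connect } from "@tursodatabase/database";

const db = await connect(":memory:");

// 1-arg form: iterate the top-level members of a JSON document.
const top = await (await db.prepare(
  `SELECT key, value FROM json_each('{"a": 1, "b": 2}')`
)).all();

// 2-arg form: iterate starting from a JSON path inside the document.
const nested = await (await db.prepare(
  `SELECT key, value FROM json_each('{"a": {"b": 2}}', '$.a')`
)).all();

console.log(top, nested);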
Cargo.lock (generated, 70 lines changed)
@@ -104,12 +104,6 @@ dependencies = [
 "backtrace",
]

[[package]]
name = "allocator-api2"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"

[[package]]
name = "anarchist-readable-name-generator-lib"
version = "0.1.2"

@@ -667,7 +661,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"

[[package]]
name = "core_tester"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "anyhow",
 "assert_cmd",

@@ -1554,8 +1548,6 @@ version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
dependencies = [
 "allocator-api2",
 "equivalent",
 "foldhash",
]

@@ -2126,7 +2118,7 @@ dependencies = [

[[package]]
name = "limbo_completion"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "mimalloc",
 "turso_ext",

@@ -2134,7 +2126,7 @@ dependencies = [

[[package]]
name = "limbo_crypto"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "blake3",
 "data-encoding",

@@ -2147,7 +2139,7 @@ dependencies = [

[[package]]
name = "limbo_csv"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "csv",
 "mimalloc",

@@ -2157,7 +2149,7 @@ dependencies = [

[[package]]
name = "limbo_ipaddr"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "ipnetwork",
 "mimalloc",

@@ -2166,7 +2158,7 @@ dependencies = [

[[package]]
name = "limbo_percentile"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "mimalloc",
 "turso_ext",

@@ -2174,7 +2166,7 @@ dependencies = [

[[package]]
name = "limbo_regexp"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "mimalloc",
 "regex",

@@ -2183,7 +2175,7 @@ dependencies = [

[[package]]
name = "limbo_sim"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "anyhow",
 "chrono",

@@ -2216,7 +2208,7 @@ dependencies = [

[[package]]
name = "limbo_sqlite_test_ext"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "cc",
]

@@ -2281,15 +2273,6 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"

[[package]]
name = "lru"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f8cc7106155f10bdf99a6f379688f543ad6596a415375b36a59a054ceda1198"
dependencies = [
 "hashbrown 0.15.2",
]

[[package]]
name = "matchers"
version = "0.1.0"

@@ -2971,7 +2954,7 @@ dependencies = [

[[package]]
name = "py-turso"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "anyhow",
 "pyo3",

@@ -3666,7 +3649,7 @@ checksum = "d372029cb5195f9ab4e4b9aef550787dce78b124fcaee8d82519925defcd6f0d"

[[package]]
name = "sql_generation"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "anarchist-readable-name-generator-lib 0.2.0",
 "anyhow",

@@ -4176,7 +4159,7 @@ dependencies = [

[[package]]
name = "turso"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "rand 0.8.5",
 "rand_chacha 0.3.1",

@@ -4188,7 +4171,7 @@ dependencies = [

[[package]]
name = "turso-java"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "jni",
 "thiserror 2.0.12",

@@ -4197,7 +4180,7 @@ dependencies = [

[[package]]
name = "turso_cli"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "anyhow",
 "cfg-if",

@@ -4230,7 +4213,7 @@ dependencies = [

[[package]]
name = "turso_core"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "aegis",
 "aes",

@@ -4252,7 +4235,6 @@ dependencies = [
 "libc",
 "libloading",
 "libm",
 "lru",
 "memory-stats",
 "miette",
 "mimalloc",

@@ -4289,7 +4271,7 @@ dependencies = [

[[package]]
name = "turso_dart"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "flutter_rust_bridge",
 "turso_core",

@@ -4297,7 +4279,7 @@ dependencies = [

[[package]]
name = "turso_ext"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "chrono",
 "getrandom 0.3.2",

@@ -4306,7 +4288,7 @@ dependencies = [

[[package]]
name = "turso_ext_tests"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "env_logger 0.11.7",
 "lazy_static",

@@ -4317,7 +4299,7 @@ dependencies = [

[[package]]
name = "turso_macros"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "proc-macro2",
 "quote",

@@ -4326,7 +4308,7 @@ dependencies = [

[[package]]
name = "turso_node"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "napi",
 "napi-build",

@@ -4338,7 +4320,7 @@ dependencies = [

[[package]]
name = "turso_parser"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "bitflags 2.9.0",
 "criterion",

@@ -4354,7 +4336,7 @@ dependencies = [

[[package]]
name = "turso_sqlite3"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "env_logger 0.11.7",
 "libc",

@@ -4367,7 +4349,7 @@ dependencies = [

[[package]]
name = "turso_sqlite3_parser"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "bitflags 2.9.0",
 "cc",

@@ -4385,7 +4367,7 @@ dependencies = [

[[package]]
name = "turso_stress"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "anarchist-readable-name-generator-lib 0.1.2",
 "antithesis_sdk",

@@ -4401,7 +4383,7 @@ dependencies = [

[[package]]
name = "turso_sync_engine"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "base64",
 "bytes",

@@ -4427,7 +4409,7 @@ dependencies = [

[[package]]
name = "turso_sync_js"
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
dependencies = [
 "genawaiter",
 "http",
Cargo.toml (36 lines changed)
@@ -6,6 +6,7 @@ members = [
    "bindings/dart/rust",
    "bindings/java",
    "bindings/javascript",
    "bindings/javascript/sync",
    "bindings/python",
    "bindings/rust",
    "cli",
@@ -27,35 +28,34 @@ members = [
    "vendored/sqlite3-parser/sqlparser_bench",
    "parser",
    "sync/engine",
    "sync/javascript",
    "sql_generation",
]
exclude = ["perf/latency/limbo"]

[workspace.package]
version = "0.1.5-pre.5"
version = "0.2.0-pre.1"
authors = ["the Limbo authors"]
edition = "2021"
license = "MIT"
repository = "https://github.com/tursodatabase/turso"

[workspace.dependencies]
turso = { path = "bindings/rust", version = "0.1.5-pre.5" }
turso_node = { path = "bindings/javascript", version = "0.1.5-pre.5" }
limbo_completion = { path = "extensions/completion", version = "0.1.5-pre.5" }
turso_core = { path = "core", version = "0.1.5-pre.5" }
turso_sync_engine = { path = "sync/engine", version = "0.1.5-pre.5" }
limbo_crypto = { path = "extensions/crypto", version = "0.1.5-pre.5" }
limbo_csv = { path = "extensions/csv", version = "0.1.5-pre.5" }
turso_ext = { path = "extensions/core", version = "0.1.5-pre.5" }
turso_ext_tests = { path = "extensions/tests", version = "0.1.5-pre.5" }
limbo_ipaddr = { path = "extensions/ipaddr", version = "0.1.5-pre.5" }
turso_macros = { path = "macros", version = "0.1.5-pre.5" }
limbo_percentile = { path = "extensions/percentile", version = "0.1.5-pre.5" }
limbo_regexp = { path = "extensions/regexp", version = "0.1.5-pre.5" }
turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.1.5-pre.5" }
limbo_uuid = { path = "extensions/uuid", version = "0.1.5-pre.5" }
turso_parser = { path = "parser", version = "0.1.5-pre.5" }
turso = { path = "bindings/rust", version = "0.2.0-pre.1" }
turso_node = { path = "bindings/javascript", version = "0.2.0-pre.1" }
limbo_completion = { path = "extensions/completion", version = "0.2.0-pre.1" }
turso_core = { path = "core", version = "0.2.0-pre.1" }
turso_sync_engine = { path = "sync/engine", version = "0.2.0-pre.1" }
limbo_crypto = { path = "extensions/crypto", version = "0.2.0-pre.1" }
limbo_csv = { path = "extensions/csv", version = "0.2.0-pre.1" }
turso_ext = { path = "extensions/core", version = "0.2.0-pre.1" }
turso_ext_tests = { path = "extensions/tests", version = "0.2.0-pre.1" }
limbo_ipaddr = { path = "extensions/ipaddr", version = "0.2.0-pre.1" }
turso_macros = { path = "macros", version = "0.2.0-pre.1" }
limbo_percentile = { path = "extensions/percentile", version = "0.2.0-pre.1" }
limbo_regexp = { path = "extensions/regexp", version = "0.2.0-pre.1" }
turso_sqlite3_parser = { path = "vendored/sqlite3-parser", version = "0.2.0-pre.1" }
limbo_uuid = { path = "extensions/uuid", version = "0.2.0-pre.1" }
turso_parser = { path = "parser", version = "0.2.0-pre.1" }
sql_generation = { path = "sql_generation" }
strum = { version = "0.26", features = ["derive"] }
strum_macros = "0.26"
Makefile (36 lines changed)
@@ -9,7 +9,7 @@ MINIMUM_TCL_VERSION := 8.6
SQLITE_EXEC ?= scripts/limbo-sqlite3
RUST_LOG := off

all: check-rust-version limbo
all: check-rust-version build
.PHONY: all

check-rust-version:
@@ -39,13 +39,13 @@ check-tcl-version:
	| tclsh
.PHONY: check-tcl-version

limbo:
build: check-rust-version
	cargo build
.PHONY: limbo
.PHONY: build

limbo-c:
turso-c:
	cargo cbuild
.PHONY: limbo-c
.PHONY: turso-c

uv-sync:
	uv sync --all-packages
@@ -55,14 +55,14 @@ uv-sync-test:
	uv sync --all-extras --dev --package turso_test
.PHONE: uv-sync

test: limbo uv-sync-test test-compat test-alter-column test-vector test-sqlite3 test-shell test-memory test-write test-update test-constraint test-collate test-extensions test-mvcc test-matviews
test: build uv-sync-test test-compat test-alter-column test-vector test-sqlite3 test-shell test-memory test-write test-update test-constraint test-collate test-extensions test-mvcc test-matviews
.PHONY: test

test-extensions: limbo uv-sync-test
test-extensions: build uv-sync-test
	RUST_LOG=$(RUST_LOG) uv run --project limbo_test test-extensions
.PHONY: test-extensions

test-shell: limbo uv-sync-test
test-shell: build uv-sync-test
	RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-shell
.PHONY: test-shell

@@ -100,11 +100,11 @@ test-json:
	RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) ./testing/json.test
.PHONY: test-json

test-memory: limbo uv-sync-test
test-memory: build uv-sync-test
	RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-memory
.PHONY: test-memory

test-write: limbo uv-sync-test
test-write: build uv-sync-test
	@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
		RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-write; \
	else \
@@ -112,7 +112,7 @@ test-write: limbo uv-sync-test
	fi
.PHONY: test-write

test-update: limbo uv-sync-test
test-update: build uv-sync-test
	@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
		RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-update; \
	else \
@@ -120,7 +120,7 @@ test-update: limbo uv-sync-test
	fi
.PHONY: test-update

test-collate: limbo uv-sync-test
test-collate: build uv-sync-test
	@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
		RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-collate; \
	else \
@@ -128,7 +128,7 @@ test-collate: limbo uv-sync-test
	fi
.PHONY: test-collate

test-constraint: limbo uv-sync-test
test-constraint: build uv-sync-test
	@if [ "$(SQLITE_EXEC)" != "scripts/limbo-sqlite3" ]; then \
		RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-constraint; \
	else \
@@ -136,22 +136,22 @@ test-constraint: limbo uv-sync-test
	fi
.PHONY: test-constraint

test-mvcc: limbo uv-sync-test
test-mvcc: build uv-sync-test
	RUST_LOG=$(RUST_LOG) SQLITE_EXEC=$(SQLITE_EXEC) uv run --project limbo_test test-mvcc;
.PHONY: test-mvcc

bench-vfs: uv-sync-test
	cargo build --release
bench-vfs: uv-sync-test build-release
	RUST_LOG=$(RUST_LOG) uv run --project limbo_test bench-vfs "$(SQL)" "$(N)"

bench-sqlite: uv-sync-test
	cargo build --release
bench-sqlite: uv-sync-test build-release
	RUST_LOG=$(RUST_LOG) uv run --project limbo_test bench-sqlite "$(VFS)" "$(SQL)" "$(N)"

clickbench:
	./perf/clickbench/benchmark.sh
.PHONY: clickbench

build-release: check-rust-version
	cargo build --bin tursodb --release --features=tracing_release

bench-exclude-tpc-h:
	@benchmarks=$$(cargo bench --bench 2>&1 | grep -A 1000 '^Available bench targets:' | grep -v '^Available bench targets:' | grep -v '^ *$$' | grep -v 'tpc_h_benchmark' | xargs -I {} printf -- "--bench %s " {}); \
@@ -7,6 +7,9 @@ license.workspace = true
repository.workspace = true
publish = false

[features]
release = ["turso_core/tracing_release"]

[lib]
crate-type = ["cdylib", "staticlib"]

@@ -6,7 +6,8 @@ edition.workspace = true
license.workspace = true
repository.workspace = true
publish = false

[features]
tracing_release = ["turso_core/tracing_release"]
[lib]
name = "_turso_java"
crate-type = ["cdylib"]

@@ -20,6 +20,6 @@ tracing.workspace = true
[features]
encryption = ["turso_core/encryption"]
browser = []

tracing_release = ["turso_core/tracing_release"]
[build-dependencies]
napi-build = "2.2.3"
bindings/javascript/examples/browser/index.html (new file, 272 lines)
@@ -0,0 +1,272 @@
<!doctype html>
<html lang="en">

<head>
  <meta charset="utf-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <title>Brutal DB Viewer</title>
  <style>
    :root {
      --fg: #000;
      --bg: #fff;
    }

    * {
      box-sizing: border-box;
    }

    html,
    body {
      margin: 0 10%;
      padding: 0;
      background: var(--bg);
      color: var(--fg);
      font: 14px/1.4 ui-monospace, SFMono-Regular, Menlo, Consolas, "Liberation Mono", monospace;
    }

    header {
      border-bottom: 2px solid #000;
      padding: 12px 16px;
      font-weight: 700;
      letter-spacing: .03em;
      text-transform: uppercase;
    }

    main {
      padding: 16px;
      display: grid;
      gap: 12px;
    }

    label {
      display: block;
      margin-bottom: 6px;
    }

    textarea {
      width: 100%;
      min-height: 128px;
      max-height: 60vh;
      resize: vertical;
      border: 1px solid #000;
      padding: 8px;
      background: #fff;
      color: #000;
    }

    .controls {
      display: flex;
      align-items: center;
      gap: 8px;
      margin-top: 8px;
    }

    button {
      appearance: none;
      background: #fff;
      color: #000;
      border: 1px solid #000;
      padding: 6px 10px;
      cursor: pointer;
      font: inherit;
    }

    button:hover {
      transform: translate(-1px, -1px);
      box-shadow: 2px 2px 0 #000;
    }

    button:active {
      transform: translate(0, 0);
      box-shadow: none;
    }

    .status {
      margin-left: auto;
      opacity: .9;
    }

    #result {
      border-top: 2px solid #000;
      padding-top: 12px;
    }

    .meta {
      margin-bottom: 8px;
    }

    .error {
      border: 1px solid #000;
      padding: 8px;
      margin-bottom: 8px;
      white-space: pre-wrap;
    }

    .table-wrap {
      overflow: auto;
      border: 1px solid #000;
      max-height: 65vh;
    }

    table {
      width: 100%;
      border-collapse: collapse;
    }

    thead th {
      position: sticky;
      top: 0;
      background: #fff;
    }

    th,
    td {
      border: 1px solid #000;
      padding: 6px 8px;
      vertical-align: top;
      white-space: pre;
    }

    .sr-only {
      position: absolute;
      width: 1px;
      height: 1px;
      padding: 0;
      margin: -1px;
      overflow: hidden;
      clip: rect(0, 0, 0, 0);
      border: 0;
    }
  </style>
</head>

<body>
  <header>DB Viewer</header>
  <main>
    <section>
      <label for="sql">Query</label>
      <textarea id="sql" spellcheck="false" placeholder="SELECT * FROM people;">SELECT 'hello, world';</textarea>
      <div class="controls">
        <button id="run" type="button" title="Run (Ctrl/⌘ + Enter)">Run</button>
        <div class="status" id="status">Ready</div>
      </div>
      <div class="sr-only" aria-live="polite" id="live"></div>
    </section>

    <section id="result">
      <div class="meta" id="meta">No results yet.</div>
      <div id="error" class="error" hidden></div>
      <div class="table-wrap">
        <table id="table" role="table" aria-label="Query results">
          <thead></thead>
          <tbody></tbody>
        </table>
      </div>
    </section>
  </main>

  <script type="module">
    import { connect } from "@tursodatabase/database-browser";
    const db = await connect('data.db');
    // --- Wire your DB here --------------------------------------------------
    // Provide window.executeQuery = async (sql) => ({ columns: string[], rows: any[][] })
    // If not provided, a tiny mock dataset is used for demo purposes.

    (function () {
      const $ = (sel) => document.querySelector(sel);
      const sqlEl = $('#sql');
      const runBtn = $('#run');
      const statusEl = $('#status');
      const liveEl = $('#live');
      const metaEl = $('#meta');
      const errEl = $('#error');
      const thead = $('#table thead');
      const tbody = $('#table tbody');

      function fmt(v) {
        if (v === null || v === undefined) return 'NULL';
        if (typeof v === 'object') {
          try { return JSON.stringify(v); } catch { return String(v); }
        }
        return String(v);
      }

      function clearTable() { thead.innerHTML = ''; tbody.innerHTML = ''; }

      function renderTable(result) {
        clearTable();
        const { columns = [], rows = [] } = result || {};

        // Header
        const trh = document.createElement('tr');
        for (const name of columns) {
          const th = document.createElement('th');
          th.textContent = String(name);
          trh.appendChild(th);
        }
        thead.appendChild(trh);

        // Body
        const frag = document.createDocumentFragment();
        for (const r of rows) {
          const tr = document.createElement('tr');
          for (let i = 0; i < columns.length; i++) {
            const td = document.createElement('td');
            td.textContent = fmt(r[i] ?? null);
            tr.appendChild(td);
          }
          frag.appendChild(tr);
        }
        tbody.appendChild(frag);

        metaEl.textContent = rows.length
          ? `${rows.length} row${rows.length === 1 ? '' : 's'} × ${columns.length} column${columns.length === 1 ? '' : 's'}`
          : 'No rows.';
      }

      async function run(sql) {
        // errEl.hidden = true; errEl.textContent = '';
        // statusEl.textContent = 'Running…';
        let t0 = performance.now();
        try {
          for (let i = 0; i < 1; i++) {
            await db.pingSync();
          }
          const res = {};
          // const stmt = await scheduler.postTask(async () => await db.prepare(sql), { priority: 'user-blocking' });
          // const columns = await scheduler.postTask(async () => (await stmt.columns()).map(x => x.name), { priority: 'user-blocking' });
          // const rows = await scheduler.postTask(async () => await stmt.all(), { priority: 'user-blocking' });
          // const res = {
          //   columns: columns,
          //   rows: rows.map(r => columns.map(c => r[c]))
          // };
          const t1 = performance.now();
          renderTable(res);
          const took = Math.max(0, t1 - t0);
          statusEl.textContent = `OK (${took}ms)`;
          liveEl.textContent = `Query finished in ${took} milliseconds.`;
        } catch (e) {
          clearTable();
          statusEl.textContent = 'ERROR';
          const msg = (e && (e.message || e.toString())) || 'Unknown error';
          errEl.textContent = 'ERROR: ' + msg;
          errEl.hidden = false;
          liveEl.textContent = 'Query failed.';
        }
      }

      runBtn.addEventListener('click', () => run(sqlEl.value));
      sqlEl.addEventListener('keydown', (e) => {
        if ((e.ctrlKey || e.metaKey) && e.key === 'Enter') {
          e.preventDefault();
          run(sqlEl.value);
        }
      });

      // Initial demo run
      run(sqlEl.value);
    })();
  </script>
</body>

</html>
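As committed, this example never actually executes the SQL: `res` is left empty and the statement calls are commented out. A hedged sketch of what the commented-out path computes, using the prepare/columns/all statement API those comments reference; the `executeQuery` name is illustrative:

// Illustrative completion of the commented-out query path above, following
// the prepare/columns/all API referenced in the example's comments.
async function executeQuery(db: any, sql: string): Promise<{ columns: string[]; rows: unknown[][] }> {
  const stmt = await db.prepare(sql);                                        // compile the statement
  const columns: string[] = (await stmt.columns()).map((c: any) => c.name);  // column metadata -> names
  const rows: any[] = await stmt.all();                                      // fetch all rows as objects
  // Re-shape row objects into arrays in column order, as renderTable expects.
  return { columns, rows: rows.map((r) => columns.map((c) => r[c])) };
}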
bindings/javascript/examples/browser/package.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
  "name": "wasm",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "dev": "vite",
    "build": "vite build",
    "preview": "vite preview"
  },
  "author": "",
  "license": "ISC",
  "description": "",
  "devDependencies": {
    "vite": "^7.1.4"
  },
  "dependencies": {
    "@tursodatabase/database-browser": "../../browser"
  }
}
bindings/javascript/examples/browser/vite.config.js (new file, 22 lines)
@@ -0,0 +1,22 @@
import { defineConfig, searchForWorkspaceRoot } from 'vite'

export default defineConfig({
  server: {
    fs: {
      allow: ['.', '../../']
    },
    define:
    {
      'process.env.NODE_DEBUG_NATIVE': 'false', // string replace at build-time
    },
    headers: {
      'Cross-Origin-Opener-Policy': 'same-origin',
      'Cross-Origin-Embedder-Policy': 'require-corp',
    }
  },
  optimizeDeps: {
    esbuildOptions: {
      define: { 'process.env.NODE_DEBUG_NATIVE': 'false' },
    },
  },
})
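Both example Vite configs send the COOP/COEP header pair above. Those headers make the page cross-origin isolated, which is what exposes SharedArrayBuffer, and the threaded wasm32-wasip1-threads build depends on it. A small standalone check using only standard web globals:

// The COOP/COEP headers opt the page into cross-origin isolation.
// SharedArrayBuffer (required by the threaded WASM build) is only
// available when `crossOriginIsolated` is true.
if (!crossOriginIsolated) {
  console.warn(
    "Not cross-origin isolated; SharedArrayBuffer is unavailable. " +
    "Serve with Cross-Origin-Opener-Policy: same-origin and " +
    "Cross-Origin-Embedder-Policy: require-corp."
  );
}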
bindings/javascript/examples/wasm/index.html (new file, 34 lines)
@@ -0,0 +1,34 @@
<html>
<head>
  <meta content="text/html;charset=utf-8" http-equiv="Content-Type"/>
</head>
<body>
  <button id="run">Run</button>
  <script type="module">
    import { Database, opfsSetup } from "@tursodatabase/database";
    var opfs = await opfsSetup("local.db");
    console.info(opfs);
    async function consume() {
      console.info('take', opfs.take());
      setTimeout(consume, 1000);
    }
    consume();
    async function tick() {
      console.info('tick');
      setTimeout(tick, 1000);
    }
    tick();

    async function run() {
      const db = new Database(opfs);
      console.info('inited');
      await new Promise(resolve => setTimeout(resolve, 5000));
      await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
      console.info('created');
      await db.exec("INSERT INTO t VALUES (1)");
      console.info('inserted');
    }
    document.getElementById("run").onclick = run;
  </script>
</body>
</html>
bindings/javascript/examples/wasm/package.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
  "name": "wasm",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "dev": "vite",
    "build": "vite build",
    "preview": "vite preview"
  },
  "author": "",
  "license": "ISC",
  "description": "",
  "devDependencies": {
    "vite": "^7.1.4"
  },
  "dependencies": {
    "@tursodatabase/database": "../.."
  }
}
bindings/javascript/examples/wasm/vite.config.js (new file, 26 lines)
@@ -0,0 +1,26 @@
import { defineConfig, searchForWorkspaceRoot } from 'vite'

export default defineConfig({
  build: {
    minify: false, // Set this to false to disable minification
  },
  resolve: {
    alias: {
      '@tursodatabase/database-wasm32-wasi': '../../turso.wasi-browser.js'
    },
  },
  server: {
    fs: {
      allow: ['.']
    },
    headers: {
      'Cross-Origin-Opener-Policy': 'same-origin',
      'Cross-Origin-Embedder-Policy': 'require-corp',
    }
  },
  optimizeDeps: {
    exclude: [
      "@tursodatabase/database-wasm32-wasi",
    ]
  },
})
bindings/javascript/package-lock.json (generated, 86 lines changed)
@@ -1,15 +1,19 @@
{
  "name": "javascript",
  "version": "0.1.5-pre.5",
  "version": "0.2.0-pre.1",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "version": "0.1.5-pre.5",
      "version": "0.2.0-pre.1",
      "workspaces": [
        "packages/common",
        "packages/native",
        "packages/browser"
        "packages/browser",
        "packages/browser-common",
        "sync/packages/common",
        "sync/packages/native",
        "sync/packages/browser"
      ]
    },
    "node_modules/@babel/code-frame": {
@@ -1103,10 +1107,26 @@
      "resolved": "packages/browser",
      "link": true
    },
    "node_modules/@tursodatabase/database-browser-common": {
      "resolved": "packages/browser-common",
      "link": true
    },
    "node_modules/@tursodatabase/database-common": {
      "resolved": "packages/common",
      "link": true
    },
    "node_modules/@tursodatabase/sync": {
      "resolved": "sync/packages/native",
      "link": true
    },
    "node_modules/@tursodatabase/sync-browser": {
      "resolved": "sync/packages/browser",
      "link": true
    },
    "node_modules/@tursodatabase/sync-common": {
      "resolved": "sync/packages/common",
      "link": true
    },
    "node_modules/@tybys/wasm-util": {
      "version": "0.10.0",
      "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.0.tgz",
@@ -2485,11 +2505,12 @@
    },
    "packages/browser": {
      "name": "@tursodatabase/database-browser",
      "version": "0.1.5-pre.5",
      "version": "0.2.0-pre.1",
      "license": "MIT",
      "dependencies": {
        "@napi-rs/wasm-runtime": "^1.0.3",
        "@tursodatabase/database-common": "^0.1.5-pre.5"
        "@tursodatabase/database-browser-common": "^0.2.0-pre.1",
        "@tursodatabase/database-common": "^0.2.0-pre.1"
      },
      "devDependencies": {
        "@napi-rs/cli": "^3.1.5",
@@ -2499,9 +2520,17 @@
        "vitest": "^3.2.4"
      }
    },
    "packages/browser-common": {
      "name": "@tursodatabase/database-browser-common",
      "version": "0.2.0-pre.1",
      "license": "MIT",
      "devDependencies": {
        "typescript": "^5.9.2"
      }
    },
    "packages/common": {
      "name": "@tursodatabase/database-common",
      "version": "0.1.5-pre.5",
      "version": "0.2.0-pre.1",
      "license": "MIT",
      "devDependencies": {
        "typescript": "^5.9.2"
@@ -2518,10 +2547,51 @@
    },
    "packages/native": {
      "name": "@tursodatabase/database",
      "version": "0.1.5-pre.5",
      "version": "0.2.0-pre.1",
      "license": "MIT",
      "dependencies": {
        "@tursodatabase/database-common": "^0.1.5-pre.5"
        "@tursodatabase/database-common": "^0.2.0-pre.1"
      },
      "devDependencies": {
        "@napi-rs/cli": "^3.1.5",
        "@types/node": "^24.3.1",
        "typescript": "^5.9.2",
        "vitest": "^3.2.4"
      }
    },
    "sync/packages/browser": {
      "name": "@tursodatabase/sync-browser",
      "version": "0.2.0-pre.1",
      "license": "MIT",
      "dependencies": {
        "@napi-rs/wasm-runtime": "^1.0.3",
        "@tursodatabase/database-browser-common": "^0.2.0-pre.1",
        "@tursodatabase/database-common": "^0.2.0-pre.1",
        "@tursodatabase/sync-common": "^0.2.0-pre.1"
      },
      "devDependencies": {
        "@napi-rs/cli": "^3.1.5",
        "@vitest/browser": "^3.2.4",
        "playwright": "^1.55.0",
        "typescript": "^5.9.2",
        "vitest": "^3.2.4"
      }
    },
    "sync/packages/common": {
      "name": "@tursodatabase/sync-common",
      "version": "0.2.0-pre.1",
      "license": "MIT",
      "devDependencies": {
        "typescript": "^5.9.2"
      }
    },
    "sync/packages/native": {
      "name": "@tursodatabase/sync",
      "version": "0.2.0-pre.1",
      "license": "MIT",
      "dependencies": {
        "@tursodatabase/database-common": "^0.2.0-pre.1",
        "@tursodatabase/sync-common": "^0.2.0-pre.1"
      },
      "devDependencies": {
        "@napi-rs/cli": "^3.1.5",

@@ -7,7 +7,11 @@
  "workspaces": [
    "packages/common",
    "packages/native",
    "packages/browser"
    "packages/browser",
    "packages/browser-common",
    "sync/packages/common",
    "sync/packages/native",
    "sync/packages/browser"
  ],
  "version": "0.1.5-pre.5"
  "version": "0.2.0-pre.1"
}
8
bindings/javascript/packages/browser-common/README.md
Normal file
@@ -0,0 +1,8 @@
## About

This package is the Turso embedded database common JS library, shared between the final builds for Node and the browser.

Do not use this package directly - instead, use `@tursodatabase/database` or `@tursodatabase/database-browser`.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.
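To make the package layering concrete, here is a minimal, hedged sketch of the intended consumption; the `connect` API is taken from the usage examples later in this change, and the common package itself only ships shared plumbing:

```typescript
// Applications import the public packages; this common package is pulled in
// transitively as a shared dependency of both builds.
import { connect } from '@tursodatabase/database-browser'; // browser build
// For Node builds, the equivalent entry point is '@tursodatabase/database'.

const db = await connect(':memory:');
await db.exec('CREATE TABLE t (x INTEGER)');
```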
239
bindings/javascript/packages/browser-common/index.ts
Normal file
@@ -0,0 +1,239 @@
function getUint8ArrayFromMemory(memory: WebAssembly.Memory, ptr: number, len: number): Uint8Array {
  ptr = ptr >>> 0;
  return new Uint8Array(memory.buffer).subarray(ptr, ptr + len);
}

function getStringFromMemory(memory: WebAssembly.Memory, ptr: number, len: number): string {
  const shared = getUint8ArrayFromMemory(memory, ptr, len);
  const copy = new Uint8Array(shared.length);
  copy.set(shared);
  const decoder = new TextDecoder('utf-8');
  return decoder.decode(copy);
}

interface BrowserImports {
  is_web_worker(): boolean;
  lookup_file(ptr: number, len: number): number;
  read(handle: number, ptr: number, len: number, offset: number): number;
  write(handle: number, ptr: number, len: number, offset: number): number;
  sync(handle: number): number;
  truncate(handle: number, len: number): number;
  size(handle: number): number;
}

function panic(name): never {
  throw new Error(`method ${name} must be invoked only from the main thread`);
}

const MainDummyImports: BrowserImports = {
  is_web_worker: function (): boolean {
    return false;
  },
  lookup_file: function (ptr: number, len: number): number {
    panic("lookup_file")
  },
  read: function (handle: number, ptr: number, len: number, offset: number): number {
    panic("read")
  },
  write: function (handle: number, ptr: number, len: number, offset: number): number {
    panic("write")
  },
  sync: function (handle: number): number {
    panic("sync")
  },
  truncate: function (handle: number, len: number): number {
    panic("truncate")
  },
  size: function (handle: number): number {
    panic("size")
  }
};

function workerImports(opfs: OpfsDirectory, memory: WebAssembly.Memory): BrowserImports {
  return {
    is_web_worker: function (): boolean {
      return true;
    },
    lookup_file: function (ptr: number, len: number): number {
      try {
        const handle = opfs.lookupFileHandle(getStringFromMemory(memory, ptr, len));
        return handle == null ? -404 : handle;
      } catch (e) {
        return -1;
      }
    },
    read: function (handle: number, ptr: number, len: number, offset: number): number {
      try {
        return opfs.read(handle, getUint8ArrayFromMemory(memory, ptr, len), offset);
      } catch (e) {
        return -1;
      }
    },
    write: function (handle: number, ptr: number, len: number, offset: number): number {
      try {
        return opfs.write(handle, getUint8ArrayFromMemory(memory, ptr, len), offset)
      } catch (e) {
        return -1;
      }
    },
    sync: function (handle: number): number {
      try {
        opfs.sync(handle);
        return 0;
      } catch (e) {
        return -1;
      }
    },
    truncate: function (handle: number, len: number): number {
      try {
        opfs.truncate(handle, len);
        return 0;
      } catch (e) {
        return -1;
      }
    },
    size: function (handle: number): number {
      try {
        return opfs.size(handle);
      } catch (e) {
        return -1;
      }
    }
  }
}

class OpfsDirectory {
  fileByPath: Map<String, { handle: number, sync: FileSystemSyncAccessHandle }>;
  fileByHandle: Map<number, FileSystemSyncAccessHandle>;
  fileHandleNo: number;

  constructor() {
    this.fileByPath = new Map();
    this.fileByHandle = new Map();
    this.fileHandleNo = 0;
  }

  async registerFile(path: string) {
    if (this.fileByPath.has(path)) {
      return;
    }
    const opfsRoot = await navigator.storage.getDirectory();
    const opfsHandle = await opfsRoot.getFileHandle(path, { create: true });
    const opfsSync = await opfsHandle.createSyncAccessHandle();
    this.fileHandleNo += 1;
    this.fileByPath.set(path, { handle: this.fileHandleNo, sync: opfsSync });
    this.fileByHandle.set(this.fileHandleNo, opfsSync);
  }

  async unregisterFile(path: string) {
    const file = this.fileByPath.get(path);
    if (file == null) {
      return;
    }
    this.fileByPath.delete(path);
    this.fileByHandle.delete(file.handle);
    file.sync.close();
  }
  lookupFileHandle(path: string): number | null {
    try {
      const file = this.fileByPath.get(path);
      if (file == null) {
        return null;
      }
      return file.handle;
    } catch (e) {
      console.error('lookupFile', path, e);
      throw e;
    }
  }
  read(handle: number, buffer: Uint8Array, offset: number): number {
    try {
      const file = this.fileByHandle.get(handle);
      const result = file.read(buffer, { at: Number(offset) });
      return result;
    } catch (e) {
      console.error('read', handle, buffer.length, offset, e);
      throw e;
    }
  }
  write(handle: number, buffer: Uint8Array, offset: number): number {
    try {
      const file = this.fileByHandle.get(handle);
      const result = file.write(buffer, { at: Number(offset) });
      return result;
    } catch (e) {
      console.error('write', handle, buffer.length, offset, e);
      throw e;
    }
  }
  sync(handle: number) {
    try {
      const file = this.fileByHandle.get(handle);
      file.flush();
    } catch (e) {
      console.error('sync', handle, e);
      throw e;
    }
  }
  truncate(handle: number, size: number) {
    try {
      const file = this.fileByHandle.get(handle);
      const result = file.truncate(size);
      return result;
    } catch (e) {
      console.error('truncate', handle, size, e);
      throw e;
    }
  }
  size(handle: number): number {
    try {
      const file = this.fileByHandle.get(handle);
      const size = file.getSize()
      return size;
    } catch (e) {
      console.error('size', handle, e);
      throw e;
    }
  }
}

var workerRequestId = 0;
function waitForWorkerResponse(worker: Worker, id: number): Promise<any> {
  let waitResolve, waitReject;
  const callback = msg => {
    if (msg.data.id == id) {
      if (msg.data.error != null) {
        waitReject(msg.data.error)
      } else {
        waitResolve()
      }
      cleanup();
    }
  };
  const cleanup = () => worker.removeEventListener("message", callback);

  worker.addEventListener("message", callback);
  const result = new Promise((resolve, reject) => {
    waitResolve = resolve;
    waitReject = reject;
  });
  return result;
}

function registerFileAtWorker(worker: Worker, path: string): Promise<void> {
  workerRequestId += 1;
  const currentId = workerRequestId;
  const promise = waitForWorkerResponse(worker, currentId);
  worker.postMessage({ __turso__: "register", path: path, id: currentId });
  return promise;
}

function unregisterFileAtWorker(worker: Worker, path: string): Promise<void> {
  workerRequestId += 1;
  const currentId = workerRequestId;
  const promise = waitForWorkerResponse(worker, currentId);
  worker.postMessage({ __turso__: "unregister", path: path, id: currentId });
  return promise;
}

export { OpfsDirectory, workerImports, MainDummyImports, waitForWorkerResponse, registerFileAtWorker, unregisterFileAtWorker }
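A hedged sketch of the main-thread side of the protocol above; the worker URL here is illustrative, since in this change the actual worker is created by the napi-rs runtime's `onCreateWorker` hook and exposed as `MainWorker`:

```typescript
import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common";

// Hypothetical worker script; it must implement the { __turso__, path, id }
// message protocol handled by the worker's onmessage hook in this change.
const worker = new Worker(new URL("./worker.mjs", import.meta.url), { type: "module" });

// Register the database file and its WAL companion before opening the database.
await Promise.all([
  registerFileAtWorker(worker, "app.db"),
  registerFileAtWorker(worker, "app.db-wal"),
]);

// ... use the database ...

// Release the OPFS sync access handles when done.
await Promise.all([
  unregisterFileAtWorker(worker, "app.db"),
  unregisterFileAtWorker(worker, "app.db-wal"),
]);
```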
25
bindings/javascript/packages/browser-common/package.json
Normal file
@@ -0,0 +1,25 @@
{
  "name": "@tursodatabase/database-browser-common",
  "version": "0.2.0-pre.1",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "packageManager": "yarn@4.9.2",
  "files": [
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "typescript": "^5.9.2"
  },
  "scripts": {
    "tsc-build": "npm exec tsc",
    "build": "npm run tsc-build",
    "test": "echo 'no tests'"
  }
}
18
bindings/javascript/packages/browser-common/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "esnext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020",
      "DOM",
      "WebWorker"
    ],
  },
  "include": [
    "*"
  ]
}
@@ -5,6 +5,7 @@ import {
  WASI as __WASI,
} from '@napi-rs/wasm-runtime'

+import { MainDummyImports } from "@tursodatabase/database-browser-common";

const __wasi = new __WASI({
@@ -25,10 +26,6 @@ const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())

export let MainWorker = null;

-function panic(name) {
-  throw new Error(`method ${name} must be invoked only from the main thread`);
-}

const {
  instance: __napiInstance,
  module: __wasiModule,
@@ -49,14 +46,8 @@ const {
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
+     ...MainDummyImports,
      memory: __sharedMemory,
-     is_web_worker: () => false,
-     lookup_file: () => panic("lookup_file"),
-     read: () => panic("read"),
-     write: () => panic("write"),
-     sync: () => panic("sync"),
-     truncate: () => panic("truncate"),
-     size: () => panic("size"),
    }
    return importObject
  },
@@ -1,6 +1,6 @@
{
  "name": "@tursodatabase/database-browser",
- "version": "0.1.5-pre.5",
+ "version": "0.2.0-pre.1",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
@@ -40,6 +40,7 @@
  },
  "dependencies": {
    "@napi-rs/wasm-runtime": "^1.0.3",
-   "@tursodatabase/database-common": "^0.1.5-pre.5"
+   "@tursodatabase/database-browser-common": "^0.2.0-pre.1",
+   "@tursodatabase/database-common": "^0.2.0-pre.1"
  }
}
@@ -1,50 +1,24 @@
-import { DatabasePromise, NativeDatabase, DatabaseOpts, SqliteError } from "@tursodatabase/database-common"
+import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common"
+import { DatabasePromise, NativeDatabase, DatabaseOpts, SqliteError, } from "@tursodatabase/database-common"
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";

-let workerRequestId = 0;
class Database extends DatabasePromise {
- files: string[];
- constructor(db: NativeDatabase, files: string[], opts: DatabaseOpts = {}) {
+ path: string | null;
+ constructor(db: NativeDatabase, fsPath: string | null, opts: DatabaseOpts = {}) {
    super(db, opts)
-   this.files = files;
+   this.path = fsPath;
  }
  async close() {
-   let currentId = workerRequestId;
-   workerRequestId += this.files.length;
-
-   let tasks = [];
-   for (const file of this.files) {
-     (MainWorker as any).postMessage({ __turso__: "unregister", path: file, id: currentId });
-     tasks.push(waitFor(currentId));
-     currentId += 1;
+   if (this.path != null) {
+     await Promise.all([
+       unregisterFileAtWorker(MainWorker, this.path),
+       unregisterFileAtWorker(MainWorker, `${this.path}-wal`)
+     ]);
    }
-   await Promise.all(tasks);
    this.db.close();
  }
}

-function waitFor(id: number): Promise<any> {
- let waitResolve, waitReject;
- const callback = msg => {
-   if (msg.data.id == id) {
-     if (msg.data.error != null) {
-       waitReject(msg.data.error)
-     } else {
-       waitResolve()
-     }
-     cleanup();
-   }
- };
- const cleanup = () => (MainWorker as any).removeEventListener("message", callback);
-
- (MainWorker as any).addEventListener("message", callback);
- const result = new Promise((resolve, reject) => {
-   waitResolve = resolve;
-   waitReject = reject;
- });
- return result;
-}
-
/**
 * Creates a new database connection asynchronously.
 *
@@ -55,24 +29,18 @@ function waitFor(id: number): Promise<any> {
async function connect(path: string, opts: DatabaseOpts = {}): Promise<Database> {
  if (path == ":memory:") {
    const db = await nativeConnect(path, { tracing: opts.tracing });
-   return new Database(db, [], opts);
+   return new Database(db, null, opts);
  }
  await initThreadPool();
  if (MainWorker == null) {
    throw new Error("panic: MainWorker is not set");
  }

- let currentId = workerRequestId;
- workerRequestId += 2;
-
- let dbHandlePromise = waitFor(currentId);
- let walHandlePromise = waitFor(currentId + 1);
- (MainWorker as any).postMessage({ __turso__: "register", path: `${path}`, id: currentId });
- (MainWorker as any).postMessage({ __turso__: "register", path: `${path}-wal`, id: currentId + 1 });
- await Promise.all([dbHandlePromise, walHandlePromise]);
+ await Promise.all([
+   registerFileAtWorker(MainWorker, path),
+   registerFileAtWorker(MainWorker, `${path}-wal`)
+ ]);
  const db = await nativeConnect(path, { tracing: opts.tracing });
- const files = [path, `${path}-wal`];
- return new Database(db, files, opts);
+ return new Database(db, path, opts);
}

export { connect, Database, SqliteError }
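With this change, file registration moves inside `connect()` and `close()`, so callers only deal with the path. A short sketch of the resulting lifecycle, assuming the public `connect` export:

```typescript
import { connect } from "@tursodatabase/database-browser";

const db = await connect("app.db"); // registers app.db and app.db-wal at the worker
await db.exec("CREATE TABLE IF NOT EXISTS t (x INTEGER)");
await db.close();                   // unregisters both files, then closes the handle
```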
@@ -5,9 +5,12 @@
    "declarationMap": true,
    "module": "nodenext",
    "target": "esnext",
+   "moduleResolution": "nodenext",
    "outDir": "dist/",
    "lib": [
-     "es2020"
+     "es2020",
+     "DOM",
+     "WebWorker"
    ],
    "paths": {
      "#index": [
@@ -1,108 +1,9 @@
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'
+import { OpfsDirectory, workerImports } from '@tursodatabase/database-browser-common';

-var fileByPath = new Map();
-var fileByHandle = new Map();
-let fileHandles = 0;
+var opfs = new OpfsDirectory();
var memory = null;

-function getUint8ArrayFromWasm(ptr, len) {
- ptr = ptr >>> 0;
- return new Uint8Array(memory.buffer).subarray(ptr, ptr + len);
-}
-
-async function registerFile(path) {
- if (fileByPath.has(path)) {
-   return;
- }
- const opfsRoot = await navigator.storage.getDirectory();
- const opfsHandle = await opfsRoot.getFileHandle(path, { create: true });
- const opfsSync = await opfsHandle.createSyncAccessHandle();
- fileHandles += 1;
- fileByPath.set(path, { handle: fileHandles, sync: opfsSync });
- fileByHandle.set(fileHandles, opfsSync);
-}
-
-async function unregisterFile(path) {
- const file = fileByPath.get(path);
- if (file == null) {
-   return;
- }
- fileByPath.delete(path);
- fileByHandle.delete(file.handle);
- file.sync.close();
-}
-
-function lookup_file(pathPtr, pathLen) {
- try {
-   const buffer = getUint8ArrayFromWasm(pathPtr, pathLen);
-   const notShared = new Uint8Array(buffer.length);
-   notShared.set(buffer);
-   const decoder = new TextDecoder('utf-8');
-   const path = decoder.decode(notShared);
-   const file = fileByPath.get(path);
-   if (file == null) {
-     return -404;
-   }
-   return file.handle;
- } catch (e) {
-   console.error('lookupFile', pathPtr, pathLen, e);
-   return -1;
- }
-}
-function read(handle, bufferPtr, bufferLen, offset) {
- try {
-   const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
-   const file = fileByHandle.get(Number(handle));
-   const result = file.read(buffer, { at: Number(offset) });
-   return result;
- } catch (e) {
-   console.error('read', handle, bufferPtr, bufferLen, offset, e);
-   return -1;
- }
-}
-function write(handle, bufferPtr, bufferLen, offset) {
- try {
-   const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
-   const file = fileByHandle.get(Number(handle));
-   const result = file.write(buffer, { at: Number(offset) });
-   return result;
- } catch (e) {
-   console.error('write', handle, bufferPtr, bufferLen, offset, e);
-   return -1;
- }
-}
-function sync(handle) {
- try {
-   const file = fileByHandle.get(Number(handle));
-   file.flush();
-   return 0;
- } catch (e) {
-   console.error('sync', handle, e);
-   return -1;
- }
-}
-function truncate(handle, size) {
- try {
-   const file = fileByHandle.get(Number(handle));
-   const result = file.truncate(size);
-   return result;
- } catch (e) {
-   console.error('truncate', handle, size, e);
-   return -1;
- }
-}
-function size(handle) {
- try {
-   const file = fileByHandle.get(Number(handle));
-   const size = file.getSize()
-   return size;
- } catch (e) {
-   console.error('size', handle, e);
-   return -1;
- }
-}

const handler = new MessageHandler({
  onLoad({ wasmModule, wasmMemory }) {
    memory = wasmMemory;
@@ -124,14 +25,8 @@ const handler = new MessageHandler({
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
+     ...workerImports(opfs, memory),
      memory: wasmMemory,
-     is_web_worker: () => true,
-     lookup_file: lookup_file,
-     read: read,
-     write: write,
-     sync: sync,
-     truncate: truncate,
-     size: size,
    }
  },
})
@@ -141,16 +36,16 @@ const handler = new MessageHandler({
globalThis.onmessage = async function (e) {
  if (e.data.__turso__ == 'register') {
    try {
-     await registerFile(e.data.path)
-     self.postMessage({ id: e.data.id })
+     await opfs.registerFile(e.data.path);
+     self.postMessage({ id: e.data.id });
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
    return;
  } else if (e.data.__turso__ == 'unregister') {
    try {
-     await unregisterFile(e.data.path)
-     self.postMessage({ id: e.data.id })
+     await opfs.unregisterFile(e.data.path);
+     self.postMessage({ id: e.data.id });
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
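For reference, the envelope handled by the worker's `onmessage` hook above can be described with the following shapes; the type names are illustrative, not exported by any package:

```typescript
// Sent from the main thread (registerFileAtWorker / unregisterFileAtWorker).
type TursoWorkerRequest = {
  __turso__: "register" | "unregister";
  path: string;
  id: number;
};

// Posted back by the worker; error is present only when registerFile or
// unregisterFile threw.
type TursoWorkerResponse = {
  id: number;
  error?: unknown;
};
```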
@@ -1,6 +1,6 @@
{
  "name": "@tursodatabase/database-common",
- "version": "0.1.5-pre.5",
+ "version": "0.2.0-pre.1",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
@@ -18,7 +18,6 @@ export interface NativeDatabase {

  prepare(sql: string): NativeStatement;

- pluck(pluckMode: boolean);
  defaultSafeIntegers(toggle: boolean);
  totalChanges(): number;
  changes(): number;
@@ -32,6 +31,11 @@ export const STEP_ROW = 1;
export const STEP_DONE = 2;
export const STEP_IO = 3;

+export interface TableColumn {
+  name: string,
+  type: string
+}

export interface NativeStatement {
  stepAsync(): Promise<number>;
  stepSync(): number;
@@ -39,7 +43,7 @@ export interface NativeStatement {
  pluck(pluckMode: boolean);
  safeIntegers(toggle: boolean);
  raw(toggle: boolean);
- columns(): string[];
+ columns(): TableColumn[];
  row(): any;
  reset();
  finalize();
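A brief sketch of consuming the new `TableColumn` shape; it assumes a `columns()` result obtained from a prepared statement of this API:

```typescript
import type { TableColumn } from "@tursodatabase/database-common";

// Render "name: type" pairs for each column of a statement's result set.
function describe(columns: TableColumn[]): string {
  return columns.map((c) => `${c.name}: ${c.type}`).join(", ");
}
```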
@@ -11,6 +11,8 @@ export declare class Database {
  constructor(path: string, opts?: DatabaseOpts | undefined | null)
  /** Returns whether the database is in memory-only mode. */
  get memory(): boolean
+ /** Returns the path of the database file. */
+ get path(): string
  /** Returns whether the database connection is open. */
  get open(): boolean
  /**
@@ -32,7 +34,7 @@ export declare class Database {
  *
  * # Returns
  */
- batchAsync(sql: string): Promise<unknown>
+ batchAsync(sql: string): Promise<void>
  /**
  * Prepares a statement for execution.
  *
@@ -123,7 +125,7 @@ export declare class Statement {
  * Step the statement and return result code (executed on the background thread):
  * 1 = Row available, 2 = Done, 3 = I/O needed
  */
- stepAsync(): Promise<unknown>
+ stepAsync(): Promise<number>
  /** Get the current row data according to the presentation mode */
  row(): unknown
  /** Sets the presentation mode to raw. */
@@ -139,7 +141,7 @@ export declare class Statement {
  */
  safeIntegers(toggle?: boolean | undefined | null): void
  /** Get column information for the statement */
- columns(): unknown[]
+ columns(): Promise<any>
  /** Finalizes the statement. */
  finalize(): void
}
@@ -81,8 +81,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-android-arm64')
      const bindingPackageVersion = require('@tursodatabase/database-android-arm64/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -97,8 +97,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-android-arm-eabi')
      const bindingPackageVersion = require('@tursodatabase/database-android-arm-eabi/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -117,8 +117,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-win32-x64-msvc')
      const bindingPackageVersion = require('@tursodatabase/database-win32-x64-msvc/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -133,8 +133,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-win32-ia32-msvc')
      const bindingPackageVersion = require('@tursodatabase/database-win32-ia32-msvc/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -149,8 +149,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-win32-arm64-msvc')
      const bindingPackageVersion = require('@tursodatabase/database-win32-arm64-msvc/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -168,8 +168,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-darwin-universal')
      const bindingPackageVersion = require('@tursodatabase/database-darwin-universal/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -184,8 +184,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-darwin-x64')
      const bindingPackageVersion = require('@tursodatabase/database-darwin-x64/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -200,8 +200,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-darwin-arm64')
      const bindingPackageVersion = require('@tursodatabase/database-darwin-arm64/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -220,8 +220,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-freebsd-x64')
      const bindingPackageVersion = require('@tursodatabase/database-freebsd-x64/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -236,8 +236,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-freebsd-arm64')
      const bindingPackageVersion = require('@tursodatabase/database-freebsd-arm64/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -257,8 +257,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-x64-musl')
      const bindingPackageVersion = require('@tursodatabase/database-linux-x64-musl/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -273,8 +273,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-x64-gnu')
      const bindingPackageVersion = require('@tursodatabase/database-linux-x64-gnu/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -291,8 +291,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-arm64-musl')
      const bindingPackageVersion = require('@tursodatabase/database-linux-arm64-musl/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -307,8 +307,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-arm64-gnu')
      const bindingPackageVersion = require('@tursodatabase/database-linux-arm64-gnu/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -325,8 +325,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-arm-musleabihf')
      const bindingPackageVersion = require('@tursodatabase/database-linux-arm-musleabihf/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -341,8 +341,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-arm-gnueabihf')
      const bindingPackageVersion = require('@tursodatabase/database-linux-arm-gnueabihf/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -359,8 +359,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-riscv64-musl')
      const bindingPackageVersion = require('@tursodatabase/database-linux-riscv64-musl/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -375,8 +375,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-riscv64-gnu')
      const bindingPackageVersion = require('@tursodatabase/database-linux-riscv64-gnu/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -392,8 +392,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-ppc64-gnu')
      const bindingPackageVersion = require('@tursodatabase/database-linux-ppc64-gnu/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -408,8 +408,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-linux-s390x-gnu')
      const bindingPackageVersion = require('@tursodatabase/database-linux-s390x-gnu/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -428,8 +428,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-openharmony-arm64')
      const bindingPackageVersion = require('@tursodatabase/database-openharmony-arm64/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -444,8 +444,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-openharmony-x64')
      const bindingPackageVersion = require('@tursodatabase/database-openharmony-x64/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -460,8 +460,8 @@ function requireNative() {
    try {
      const binding = require('@tursodatabase/database-openharmony-arm')
      const bindingPackageVersion = require('@tursodatabase/database-openharmony-arm/package.json').version
-     if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
-       throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
+     if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
+       throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
@@ -1,6 +1,6 @@
{
  "name": "@tursodatabase/database",
- "version": "0.1.5-pre.5",
+ "version": "0.2.0-pre.1",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
@@ -44,7 +44,7 @@
    ]
  },
  "dependencies": {
-   "@tursodatabase/database-common": "^0.1.5-pre.5"
+   "@tursodatabase/database-common": "^0.2.0-pre.1"
  },
  "imports": {
    "#index": "./index.js"
11
bindings/javascript/replace.sh
Normal file
@@ -0,0 +1,11 @@
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/common/package.json
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/native/package.json
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/browser/package.json

sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/common/package.json
sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/native/package.json
sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/browser/package.json

sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/native/promise.ts
sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/native/compat.ts
sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/browser/promise.ts
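Note that the script assumes the environment variables `NAME_FROM`, `NAME_TO`, `VERSION_FROM`, and `VERSION_TO` are set before it runs; each `sed` call rewrites the package scope, version, and `database-common` import specifiers in place across the three package manifests and the TypeScript entry points.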
@@ -28,7 +28,6 @@ pub fn init_thread_pool() -> napi::Result<AsyncTask<NoopTask>> {

pub struct ConnectTask {
    path: String,
-   is_memory: bool,
    io: Arc<dyn turso_core::IO>,
}

@@ -70,7 +69,7 @@ impl Task for ConnectTask {
            Some(result.db),
            self.io.clone(),
            result.conn,
-           self.is_memory,
+           self.path.clone(),
        ))
    }
}
@@ -88,16 +87,11 @@ pub fn connect(path: String, opts: Option<DatabaseOpts>) -> Result<AsyncTask<Con
    let task = if is_memory(&path) {
        ConnectTask {
            io: Arc::new(turso_core::MemoryIO::new()),
-           is_memory: true,
            path,
        }
    } else {
        let io = Arc::new(Opfs::new()?);
-       ConnectTask {
-           io,
-           is_memory: false,
-           path,
-       }
+       ConnectTask { io, path }
    };
    Ok(AsyncTask::new(task))
}
@@ -45,7 +45,7 @@ pub struct Database {
    _db: Option<Arc<turso_core::Database>>,
    io: Arc<dyn turso_core::IO>,
    conn: Option<Arc<turso_core::Connection>>,
-   is_memory: bool,
+   path: String,
    is_open: Cell<bool>,
    default_safe_integers: Cell<bool>,
}
@@ -186,20 +186,20 @@ impl Database {
            .connect()
            .map_err(|e| Error::new(Status::GenericFailure, format!("Failed to connect: {e}")))?;

-       Ok(Self::create(Some(db), io, conn, is_memory(&path)))
+       Ok(Self::create(Some(db), io, conn, path))
    }

    pub fn create(
        db: Option<Arc<turso_core::Database>>,
        io: Arc<dyn turso_core::IO>,
        conn: Arc<turso_core::Connection>,
-       is_memory: bool,
+       path: String,
    ) -> Self {
        Database {
            _db: db,
            io,
            conn: Some(conn),
-           is_memory,
+           path,
            is_open: Cell::new(true),
            default_safe_integers: Cell::new(false),
        }
@@ -218,7 +218,13 @@ impl Database {
    /// Returns whether the database is in memory-only mode.
    #[napi(getter)]
    pub fn memory(&self) -> bool {
-       self.is_memory
+       is_memory(&self.path)
    }

+   /// Returns the path of the database file.
+   #[napi(getter)]
+   pub fn path(&self) -> String {
+       self.path.clone()
+   }

    /// Returns whether the database connection is open.
@@ -246,7 +252,7 @@ impl Database {
    /// * `sql` - The SQL statements to execute.
    ///
    /// # Returns
-   #[napi]
+   #[napi(ts_return_type = "Promise<void>")]
    pub fn batch_async(&self, sql: String) -> Result<AsyncTask<DbTask>> {
        Ok(AsyncTask::new(DbTask::Batch {
            conn: self.conn()?.clone(),
@@ -319,7 +325,7 @@ impl Database {
    #[napi]
    pub fn close(&mut self) -> Result<()> {
        self.is_open.set(false);
-       let _ = self._db.take().unwrap();
+       let _ = self._db.take();
        let _ = self.conn.take().unwrap();
        Ok(())
    }
@@ -338,7 +344,7 @@ impl Database {
    #[napi]
    pub fn io_loop_sync(&self) -> Result<()> {
        self.io
-           .run_once()
+           .step()
            .map_err(|e| Error::new(Status::GenericFailure, format!("IO error: {e}")))?;
        Ok(())
    }
@@ -482,7 +488,7 @@ impl Statement {

    /// Step the statement and return result code (executed on the background thread):
    /// 1 = Row available, 2 = Done, 3 = I/O needed
-   #[napi]
+   #[napi(ts_return_type = "Promise<number>")]
    pub fn step_async(&self) -> Result<AsyncTask<DbTask>> {
        Ok(AsyncTask::new(DbTask::Step {
            stmt: self.stmt.clone(),
@@ -577,7 +583,7 @@ impl Statement {
    }

    /// Get column information for the statement
-   #[napi]
+   #[napi(ts_return_type = "Promise<any>")]
    pub fn columns<'env>(&self, env: &'env Env) -> Result<Array<'env>> {
        let stmt_ref = self.stmt.borrow();
        let stmt = stmt_ref
@@ -631,7 +637,7 @@ impl Task for IoLoopTask {
    type JsValue = ();

    fn compute(&mut self) -> napi::Result<Self::Output> {
-       self.io.run_once().map_err(|e| {
+       self.io.step().map_err(|e| {
            napi::Error::new(napi::Status::GenericFailure, format!("IO error: {e}"))
        })?;
        Ok(())
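The Rust getters above surface in JavaScript as properties; a hedged sketch of the resulting behavior, assuming the getters are passed through unchanged by the JS wrapper:

```typescript
import { connect } from "@tursodatabase/database";

const mem = await connect(":memory:");
console.log(mem.memory); // true, now derived from the stored path
console.log(mem.path);   // ":memory:"

const file = await connect("app.db");
console.log(file.memory); // false
console.log(file.path);   // "app.db"
```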
@@ -21,3 +21,6 @@ tracing-subscriber = "0.3.19"

[build-dependencies]
napi-build = "2.2.3"

+[features]
+browser = ["turso_node/browser"]
124
bindings/javascript/sync/packages/browser/README.md
Normal file
@@ -0,0 +1,124 @@
<p align="center">
  <h1 align="center">Turso Database for JavaScript in the Browser</h1>
</p>

<p align="center">
  <a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
  <a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
  <a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package is the Turso embedded database library for JavaScript in the browser.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Features

- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
- **In-process**: No network overhead, runs directly in your application
- **TypeScript support**: Full TypeScript definitions included

## Installation

```bash
npm install @tursodatabase/database-browser
```

## Getting Started

### In-Memory Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create an in-memory database
const db = await connect(':memory:');

// Create a table
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

// Insert data
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
await insert.run('Alice', 'alice@example.com');
await insert.run('Bob', 'bob@example.com');

// Query data
const users = await db.prepare('SELECT * FROM users').all();
console.log(users);
// Output: [
//   { id: 1, name: 'Alice', email: 'alice@example.com' },
//   { id: 2, name: 'Bob', email: 'bob@example.com' }
// ]
```

### File-Based Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create or open a database file
const db = await connect('my-database.db');

// Create a table
await db.exec(`
  CREATE TABLE IF NOT EXISTS posts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    content TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )
`);

// Insert a post
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
const result = await insertPost.run('Hello World', 'This is my first blog post!');

console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
```

### Transactions

```javascript
import { connect } from '@tursodatabase/database-browser';

const db = await connect('transactions.db');

// Using transactions for atomic operations
const transaction = db.transaction(async (users) => {
  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  for (const user of users) {
    await insert.run(user.name, user.email);
  }
});

// Execute transaction
await transaction([
  { name: 'Alice', email: 'alice@example.com' },
  { name: 'Bob', email: 'bob@example.com' }
]);
```

## API Reference

For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).

## Related Packages

* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud.

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)
@@ -1,17 +1,18 @@
import {
  createOnMessage as __wasmCreateOnMessageForFsProxy,
  getDefaultContext as __emnapiGetDefaultContext,
- instantiateNapiModuleSync as __emnapiInstantiateNapiModuleSync,
+ instantiateNapiModule as __emnapiInstantiateNapiModule,
  WASI as __WASI,
} from '@napi-rs/wasm-runtime'

+import { MainDummyImports } from "@tursodatabase/database-browser-common";

const __wasi = new __WASI({
  version: 'preview1',
})

-const __wasmUrl = new URL('./turso-sync-js.wasm32-wasi.wasm', import.meta.url).href
+const __wasmUrl = new URL('./sync.wasm32-wasi.wasm', import.meta.url).href
const __emnapiContext = __emnapiGetDefaultContext()

@@ -23,19 +24,21 @@ const __sharedMemory = new WebAssembly.Memory({

const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())

+export let MainWorker = null;

const {
  instance: __napiInstance,
  module: __wasiModule,
  napiModule: __napiModule,
-} = __emnapiInstantiateNapiModuleSync(__wasmFile, {
+} = await __emnapiInstantiateNapiModule(__wasmFile, {
  context: __emnapiContext,
- asyncWorkPoolSize: 4,
+ asyncWorkPoolSize: 1,
  wasi: __wasi,
  onCreateWorker() {
-   const worker = new Worker(new URL('./wasi-worker-browser.mjs', import.meta.url), {
+   const worker = new Worker(new URL('./worker.mjs', import.meta.url), {
      type: 'module',
    })
+   MainWorker = worker;
    return worker
  },
  overwriteImports(importObject) {
@@ -43,6 +46,7 @@ const {
    ...importObject.env,
    ...importObject.napi,
    ...importObject.emnapi,
+   ...MainDummyImports,
    memory: __sharedMemory,
  }
  return importObject
@@ -58,11 +62,15 @@ const {
export default __napiModule.exports
export const Database = __napiModule.exports.Database
export const Statement = __napiModule.exports.Statement
+export const Opfs = __napiModule.exports.Opfs
+export const OpfsFile = __napiModule.exports.OpfsFile
export const connect = __napiModule.exports.connect
+export const initThreadPool = __napiModule.exports.initThreadPool
export const GeneratorHolder = __napiModule.exports.GeneratorHolder
export const JsDataCompletion = __napiModule.exports.JsDataCompletion
export const JsDataPollResult = __napiModule.exports.JsDataPollResult
export const JsProtocolIo = __napiModule.exports.JsProtocolIo
-export const JsProtocolRequestData = __napiModule.exports.JsProtocolRequestData
+export const JsProtocolRequestBytes = __napiModule.exports.JsProtocolRequestBytes
export const SyncEngine = __napiModule.exports.SyncEngine
export const DatabaseChangeTypeJs = __napiModule.exports.DatabaseChangeTypeJs
export const SyncEngineProtocolVersion = __napiModule.exports.SyncEngineProtocolVersion
47	bindings/javascript/sync/packages/browser/package.json	Normal file
@@ -0,0 +1,47 @@
{
  "name": "@tursodatabase/sync-browser",
  "version": "0.2.0-pre.1",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/promise.js",
  "packageManager": "yarn@4.9.2",
  "files": [
    "index.js",
    "worker.mjs",
    "sync.wasm32-wasi.wasm",
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "@napi-rs/cli": "^3.1.5",
    "@vitest/browser": "^3.2.4",
    "playwright": "^1.55.0",
    "typescript": "^5.9.2",
    "vitest": "^3.2.4"
  },
  "scripts": {
    "napi-build": "napi build --features browser --release --platform --target wasm32-wasip1-threads --no-js --manifest-path ../../Cargo.toml --output-dir . && rm index.d.ts sync.wasi* wasi* browser.js",
    "tsc-build": "npm exec tsc",
    "build": "npm run napi-build && npm run tsc-build",
    "test": "VITE_TURSO_DB_URL=http://b--a--a.localhost:10000 CI=1 vitest --browser=chromium --run && VITE_TURSO_DB_URL=http://b--a--a.localhost:10000 CI=1 vitest --browser=firefox --run"
  },
  "napi": {
    "binaryName": "sync",
    "targets": [
      "wasm32-wasip1-threads"
    ]
  },
  "imports": {
    "#index": "./index.js"
  },
  "dependencies": {
    "@napi-rs/wasm-runtime": "^1.0.3",
    "@tursodatabase/database-browser-common": "^0.2.0-pre.1",
    "@tursodatabase/database-common": "^0.2.0-pre.1",
    "@tursodatabase/sync-common": "^0.2.0-pre.1"
  }
}
281	bindings/javascript/sync/packages/browser/promise.test.ts	Normal file
@@ -0,0 +1,281 @@
import { expect, test } from 'vitest'
import { connect, DatabaseRowMutation, DatabaseRowTransformResult } from './promise.js'

const localeCompare = (a, b) => a.x.localeCompare(b.x);

test('select-after-push', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
    await db.exec("DELETE FROM t");
    await db.push();
    await db.close();
  }
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("INSERT INTO t VALUES (1), (2), (3)");
    await db.push();
  }
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    const rows = await db.prepare('SELECT * FROM t').all();
    expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }])
  }
})

test('select-without-push', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
    await db.exec("DELETE FROM t");
    await db.push();
    await db.close();
  }
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("INSERT INTO t VALUES (1), (2), (3)");
  }
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    const rows = await db.prepare('SELECT * FROM t').all();
    expect(rows).toEqual([])
  }
})

test('merge-non-overlapping-keys', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
    await db.exec("DELETE FROM q");
    await db.push();
    await db.close();
  }
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2')");

  const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db2.exec("INSERT INTO q VALUES ('k3', 'value3'), ('k4', 'value4'), ('k5', 'value5')");

  await Promise.all([db1.push(), db2.push()]);
  await Promise.all([db1.pull(), db2.pull()]);

  const rows1 = await db1.prepare('SELECT * FROM q').all();
  const rows2 = await db2.prepare('SELECT * FROM q').all();
  const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value3' }, { x: 'k4', y: 'value4' }, { x: 'k5', y: 'value5' }];
  expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
  expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
    await db.exec("DELETE FROM q");
    await db.push();
    await db.close();
  }
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");

  const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

  await db2.push();
  await db1.push();
  await Promise.all([db1.pull(), db2.pull()]);

  const rows1 = await db1.prepare('SELECT * FROM q').all();
  const rows2 = await db2.prepare('SELECT * FROM q').all();
  const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value5' }, { x: 'k4', y: 'value4' }];
  expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
  expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins-with-delete', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
    await db.exec("DELETE FROM q");
    await db.push();
    await db.close();
  }
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
  await db1.exec("DELETE FROM q")

  const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

  await db2.push();
  await db1.push();
  await Promise.all([db1.pull(), db2.pull()]);

  const rows1 = await db1.prepare('SELECT * FROM q').all();
  const rows2 = await db2.prepare('SELECT * FROM q').all();
  const expected = [{ x: 'k3', y: 'value5' }];
  expect(rows1).toEqual(expected)
  expect(rows2).toEqual(expected)
})

test('constraint-conflict', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS u(x TEXT PRIMARY KEY, y UNIQUE)");
    await db.exec("DELETE FROM u");
    await db.push();
    await db.close();
  }
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db1.exec("INSERT INTO u VALUES ('k1', 'value1')");

  const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  await db2.exec("INSERT INTO u VALUES ('k2', 'value1')");

  await db1.push();
  await expect(async () => await db2.push()).rejects.toThrow('SQLite error: UNIQUE constraint failed: u.y');
})

test('checkpoint', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
    await db.exec("DELETE FROM q");
    await db.push();
    await db.close();
  }
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
  for (let i = 0; i < 1000; i++) {
    await db1.exec(`INSERT INTO q VALUES ('k${i}', 'v${i}')`);
  }
  expect((await db1.stats()).mainWal).toBeGreaterThan(4096 * 1000);
  await db1.checkpoint();
  expect((await db1.stats()).mainWal).toBe(0);
  let revertWal = (await db1.stats()).revertWal;
  expect(revertWal).toBeLessThan(4096 * 1000 / 100);

  for (let i = 0; i < 1000; i++) {
    await db1.exec(`UPDATE q SET y = 'u${i}' WHERE x = 'k${i}'`);
  }
  await db1.checkpoint();
  expect((await db1.stats()).revertWal).toBe(revertWal);
})

test('persistence', async () => {
  {
    const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
    await db.exec("DELETE FROM q");
    await db.push();
    await db.close();
  }
  const path = `test-${(Math.random() * 10000) | 0}.db`;
  {
    const db1 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
    await db1.exec(`INSERT INTO q VALUES ('k1', 'v1')`);
    await db1.exec(`INSERT INTO q VALUES ('k2', 'v2')`);
    await db1.close();
  }

  {
    const db2 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
    await db2.exec(`INSERT INTO q VALUES ('k3', 'v3')`);
    await db2.exec(`INSERT INTO q VALUES ('k4', 'v4')`);
    const stmt = db2.prepare('SELECT * FROM q');
    const rows = await stmt.all();
    const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
    expect(rows).toEqual(expected)
    stmt.close();
    await db2.close();
  }

  {
    const db3 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
    await db3.push();
    await db3.close();
  }

  {
    const db4 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
    const rows = await db4.prepare('SELECT * FROM q').all();
    const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
    expect(rows).toEqual(expected)
    await db4.close();
  }
})

test('transform', async () => {
  {
    const db = await connect({
      path: ':memory:',
      url: process.env.VITE_TURSO_DB_URL,
    });
    await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
    await db.exec("DELETE FROM counter");
    await db.exec("INSERT INTO counter VALUES ('1', 0)")
    await db.push();
    await db.close();
  }
  const transform = (m: DatabaseRowMutation) => ({
    operation: 'rewrite',
    stmt: {
      sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
      values: [m.after.value - m.before.value, m.after.key]
    }
  } as DatabaseRowTransformResult);
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
  const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

  await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
  await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");

  await Promise.all([db1.push(), db2.push()]);
  await Promise.all([db1.pull(), db2.pull()]);

  const rows1 = await db1.prepare('SELECT * FROM counter').all();
  const rows2 = await db2.prepare('SELECT * FROM counter').all();
  expect(rows1).toEqual([{ key: '1', value: 2 }]);
  expect(rows2).toEqual([{ key: '1', value: 2 }]);
})

test('transform-many', async () => {
  {
    const db = await connect({
      path: ':memory:',
      url: process.env.VITE_TURSO_DB_URL,
    });
    await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
    await db.exec("DELETE FROM counter");
    await db.exec("INSERT INTO counter VALUES ('1', 0)")
    await db.push();
    await db.close();
  }
  const transform = (m: DatabaseRowMutation) => ({
    operation: 'rewrite',
    stmt: {
      sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
      values: [m.after.value - m.before.value, m.after.key]
    }
  } as DatabaseRowTransformResult);
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
  const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

  for (let i = 0; i < 1002; i++) {
    await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
  }
  for (let i = 0; i < 1001; i++) {
    await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
  }

  let start = performance.now();
  await Promise.all([db1.push(), db2.push()]);
  console.info('push', performance.now() - start);

  start = performance.now();
  await Promise.all([db1.pull(), db2.pull()]);
  console.info('pull', performance.now() - start);

  const rows1 = await db1.prepare('SELECT * FROM counter').all();
  const rows2 = await db2.prepare('SELECT * FROM counter').all();
  expect(rows1).toEqual([{ key: '1', value: 1001 + 1002 }]);
  expect(rows2).toEqual([{ key: '1', value: 1001 + 1002 }]);
})
113	bindings/javascript/sync/packages/browser/promise.ts	Normal file
@@ -0,0 +1,113 @@
import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common"
import { DatabasePromise, DatabaseOpts, NativeDatabase } from "@tursodatabase/database-common"
import { ProtocolIo, run, SyncOpts, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult, memoryIO } from "@tursodatabase/sync-common";
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";
import { Database as NativeDB, SyncEngine } from "#index";

let BrowserIo: ProtocolIo = {
  async read(path: string): Promise<Buffer | Uint8Array | null> {
    const result = localStorage.getItem(path);
    if (result == null) {
      return null;
    }
    return new TextEncoder().encode(result);
  },
  async write(path: string, data: Buffer | Uint8Array): Promise<void> {
    const array = new Uint8Array(data);
    const value = new TextDecoder('utf-8').decode(array);
    localStorage.setItem(path, value);
  }
};


class Database extends DatabasePromise {
  runOpts: RunOpts;
  engine: any;
  io: ProtocolIo;
  fsPath: string | null;
  constructor(db: NativeDatabase, io: ProtocolIo, runOpts: RunOpts, engine: any, fsPath: string | null, opts: DatabaseOpts = {}) {
    super(db, opts)
    this.runOpts = runOpts;
    this.engine = engine;
    this.fsPath = fsPath;
    this.io = io;
  }
  async sync() {
    await run(this.runOpts, this.io, this.engine, this.engine.sync());
  }
  async pull() {
    await run(this.runOpts, this.io, this.engine, this.engine.pull());
  }
  async push() {
    await run(this.runOpts, this.io, this.engine, this.engine.push());
  }
  async checkpoint() {
    await run(this.runOpts, this.io, this.engine, this.engine.checkpoint());
  }
  async stats(): Promise<{ operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }> {
    return (await run(this.runOpts, this.io, this.engine, this.engine.stats()));
  }
  override async close(): Promise<void> {
    this.db.close();
    this.engine.close();
    if (this.fsPath != null) {
      await Promise.all([
        unregisterFileAtWorker(MainWorker, this.fsPath),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-wal`),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-revert`),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-info`),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-changes`),
      ]);
    }
  }
}

/**
 * Creates a new database connection asynchronously.
 *
 * @param {SyncOpts} opts - Sync options: the local database path, the remote url, and optional fields such as authToken, encryptionKey and transform.
 * @returns {Promise<Database>} - A promise that resolves to a Database instance.
 */
async function connect(opts: SyncOpts): Promise<Database> {
  const engine = new SyncEngine({
    path: opts.path,
    clientName: opts.clientName,
    tablesIgnore: opts.tablesIgnore,
    useTransform: opts.transform != null,
    tracing: opts.tracing,
    protocolVersion: 1
  });
  const runOpts: RunOpts = {
    url: opts.url,
    headers: {
      ...(opts.authToken != null && { "Authorization": `Bearer ${opts.authToken}` }),
      ...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
    },
    preemptionMs: 1,
    transform: opts.transform,
  };
  const isMemory = opts.path == ':memory:';
  let io = isMemory ? memoryIO() : BrowserIo;

  await initThreadPool();
  if (MainWorker == null) {
    throw new Error("panic: MainWorker is not set");
  }
  if (!isMemory) {
    await Promise.all([
      registerFileAtWorker(MainWorker, opts.path),
      registerFileAtWorker(MainWorker, `${opts.path}-wal`),
      registerFileAtWorker(MainWorker, `${opts.path}-revert`),
      registerFileAtWorker(MainWorker, `${opts.path}-info`),
      registerFileAtWorker(MainWorker, `${opts.path}-changes`),
    ]);
  }
  await run(runOpts, io, engine, engine.init());

  const nativeDb = engine.open();
  return new Database(nativeDb as any, io, runOpts, engine, isMemory ? null : opts.path, {});
}

export { connect, Database, }
export type { DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
@@ -1,17 +1,19 @@
 {
   "compilerOptions": {
     "skipLibCheck": true,
     "declaration": true,
     "declarationMap": true,
     "module": "nodenext",
     "target": "esnext",
     "moduleResolution": "nodenext",
     "declaration": true,
     "outDir": "dist/",
     "lib": [
       "es2020",
-      "dom"
+      "DOM",
+      "WebWorker"
     ],
     "paths": {
-      "#entry-point": [
+      "#index": [
         "./index.js"
       ]
     }
23	bindings/javascript/sync/packages/browser/vitest.config.ts	Normal file
@@ -0,0 +1,23 @@
import { defineConfig } from 'vitest/config'

export default defineConfig({
  define: {
    'process.env.NODE_DEBUG_NATIVE': 'false',
  },
  server: {
    headers: {
      "Cross-Origin-Embedder-Policy": "require-corp",
      "Cross-Origin-Opener-Policy": "same-origin"
    },
  },
  test: {
    browser: {
      enabled: true,
      provider: 'playwright',
      instances: [
        { browser: 'chromium' },
        { browser: 'firefox' }
      ],
    },
  },
})
@@ -1,13 +1,18 @@
 import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'
+import { OpfsDirectory, workerImports } from "@tursodatabase/database-browser-common";

+var opfs = new OpfsDirectory();
+var memory = null;

 const handler = new MessageHandler({
   onLoad({ wasmModule, wasmMemory }) {
+    memory = wasmMemory;
     const wasi = new WASI({
       print: function () {
         // eslint-disable-next-line no-console
         console.log.apply(console, arguments)
       },
-      printErr: function() {
+      printErr: function () {
         // eslint-disable-next-line no-console
         console.error.apply(console, arguments)
       },
@@ -20,6 +25,7 @@ const handler = new MessageHandler({
         ...importObject.env,
         ...importObject.napi,
         ...importObject.emnapi,
+        ...workerImports(opfs, memory),
         memory: wasmMemory,
       }
     },
@@ -27,6 +33,23 @@ const handler = new MessageHandler({
   },
 })

-globalThis.onmessage = function (e) {
+globalThis.onmessage = async function (e) {
+  if (e.data.__turso__ == 'register') {
+    try {
+      await opfs.registerFile(e.data.path);
+      self.postMessage({ id: e.data.id });
+    } catch (error) {
+      self.postMessage({ id: e.data.id, error: error });
+    }
+    return;
+  } else if (e.data.__turso__ == 'unregister') {
+    try {
+      await opfs.unregisterFile(e.data.path);
+      self.postMessage({ id: e.data.id });
+    } catch (error) {
+      self.postMessage({ id: e.data.id, error: error });
+    }
+    return;
+  }
   handler.handle(e)
 }
8	bindings/javascript/sync/packages/common/README.md	Normal file
@@ -0,0 +1,8 @@
## About

This package is the common JS library for Turso Sync, shared between the final builds for Node and the browser.

Do not use this package directly - instead, depend on `@tursodatabase/sync` or `@tursodatabase/sync-browser`, which re-export the public API, as shown below.
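
For example (illustrative import paths; pick the entry point that matches your platform):

```javascript
// Node:
import { connect } from '@tursodatabase/sync';

// Browser (backed by OPFS and a worker, per the browser package in this repo):
// import { connect } from '@tursodatabase/sync-browser';
```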

> **⚠️ Warning:** This software is ALPHA; use it only for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.
5	bindings/javascript/sync/packages/common/index.ts	Normal file
@@ -0,0 +1,5 @@
import { run, memoryIO } from "./run.js"
import { SyncOpts, ProtocolIo, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult } from "./types.js"

export { run, memoryIO, }
export type { SyncOpts, ProtocolIo, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
25	bindings/javascript/sync/packages/common/package.json	Normal file
@@ -0,0 +1,25 @@
{
  "name": "@tursodatabase/sync-common",
  "version": "0.2.0-pre.1",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "packageManager": "yarn@4.9.2",
  "files": [
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "typescript": "^5.9.2"
  },
  "scripts": {
    "tsc-build": "npm exec tsc",
    "build": "npm run tsc-build",
    "test": "echo 'no tests'"
  }
}
127	bindings/javascript/sync/packages/common/run.ts	Normal file
@@ -0,0 +1,127 @@
"use strict";

import { GeneratorResponse, ProtocolIo, RunOpts } from "./types.js";

const GENERATOR_RESUME_IO = 0;
const GENERATOR_RESUME_DONE = 1;

interface TrackPromise<T> {
  promise: Promise<T>,
  finished: boolean
}

function trackPromise<T>(p: Promise<T>): TrackPromise<T> {
  let status = { promise: null, finished: false };
  status.promise = p.finally(() => status.finished = true);
  return status;
}

function timeoutMs(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms))
}

async function process(opts: RunOpts, io: ProtocolIo, request: any) {
  const requestType = request.request();
  const completion = request.completion();
  if (requestType.type == 'Http') {
    try {
      let headers = opts.headers;
      if (requestType.headers != null && requestType.headers.length > 0) {
        headers = { ...opts.headers };
        for (let header of requestType.headers) {
          headers[header[0]] = header[1];
        }
      }
      const response = await fetch(`${opts.url}${requestType.path}`, {
        method: requestType.method,
        headers: headers,
        body: requestType.body != null ? new Uint8Array(requestType.body) : null,
      });
      completion.status(response.status);
      const reader = response.body.getReader();
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          completion.done();
          break;
        }
        completion.pushBuffer(value);
      }
    } catch (error) {
      completion.poison(`fetch error: ${error}`);
    }
  } else if (requestType.type == 'FullRead') {
    try {
      const metadata = await io.read(requestType.path);
      if (metadata != null) {
        completion.pushBuffer(metadata);
      }
      completion.done();
    } catch (error) {
      completion.poison(`metadata read error: ${error}`);
    }
  } else if (requestType.type == 'FullWrite') {
    try {
      await io.write(requestType.path, requestType.content);
      completion.done();
    } catch (error) {
      completion.poison(`metadata write error: ${error}`);
    }
  } else if (requestType.type == 'Transform') {
    if (opts.transform == null) {
      completion.poison("transform is not set");
      return;
    }
    const results = [];
    for (const mutation of requestType.mutations) {
      const result = opts.transform(mutation);
      if (result == null) {
        results.push({ type: 'Keep' });
      } else if (result.operation == 'skip') {
        results.push({ type: 'Skip' });
      } else if (result.operation == 'rewrite') {
        results.push({ type: 'Rewrite', stmt: result.stmt });
      } else {
        completion.poison("unexpected transform operation");
        return;
      }
    }
    completion.pushTransform(results);
    completion.done();
  }
}

export function memoryIO(): ProtocolIo {
  let values = new Map();
  return {
    async read(path: string): Promise<Buffer | Uint8Array | null> {
      return values.get(path);
    },
    async write(path: string, data: Buffer | Uint8Array): Promise<void> {
      values.set(path, data);
    }
  }
};


export async function run(opts: RunOpts, io: ProtocolIo, engine: any, generator: any): Promise<any> {
  let tasks = [];
  while (true) {
    const { type, ...rest }: GeneratorResponse = await generator.resumeAsync(null);
    if (type == 'Done') {
      return null;
    }
    if (type == 'SyncEngineStats') {
      return rest;
    }
    for (let request = engine.protocolIo(); request != null; request = engine.protocolIo()) {
      tasks.push(trackPromise(process(opts, io, request)));
    }

    const tasksRace = tasks.length == 0 ? Promise.resolve() : Promise.race([timeoutMs(opts.preemptionMs), ...tasks.map(t => t.promise)]);
    await Promise.all([engine.ioLoopAsync(), tasksRace]);

    tasks = tasks.filter(t => !t.finished);
  }
}
17	bindings/javascript/sync/packages/common/tsconfig.json	Normal file
@@ -0,0 +1,17 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "esnext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020",
      "dom"
    ],
  },
  "include": [
    "*"
  ]
}
50	bindings/javascript/sync/packages/common/types.ts	Normal file
@@ -0,0 +1,50 @@
export declare const enum DatabaseChangeType {
  Insert = 0,
  Update = 1,
  Delete = 2
}

export interface DatabaseRowMutation {
  changeTime: number
  tableName: string
  id: number
  changeType: DatabaseChangeType
  before?: Record<string, any>
  after?: Record<string, any>
  updates?: Record<string, any>
}

export type DatabaseRowTransformResult = { operation: 'skip' } | { operation: 'rewrite', stmt: DatabaseRowStatement } | null;
export type Transform = (arg: DatabaseRowMutation) => DatabaseRowTransformResult;
export interface RunOpts {
  preemptionMs: number,
  url: string,
  headers: { [K: string]: string }
  transform?: Transform,
}

export interface ProtocolIo {
  read(path: string): Promise<Buffer | Uint8Array | null>;
  write(path: string, content: Buffer | Uint8Array): Promise<void>;
}

export interface SyncOpts {
  path: string;
  clientName?: string;
  url: string;
  authToken?: string;
  encryptionKey?: string;
  tablesIgnore?: string[],
  transform?: Transform,
  tracing?: string,
}

export interface DatabaseRowStatement {
  sql: string
  values: Array<any>
}

export type GeneratorResponse =
  | { type: 'IO' }
  | { type: 'Done' }
  | { type: 'SyncEngineStats', operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }
125	bindings/javascript/sync/packages/native/README.md	Normal file
@@ -0,0 +1,125 @@
<p align="center">
  <h1 align="center">Turso Database for JavaScript in Node</h1>
</p>

<p align="center">
  <a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
  <a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
  <a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package is the Turso embedded database library for JavaScript in Node.

> **⚠️ Warning:** This software is ALPHA; use it only for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Features

- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
- **In-process:** No network overhead, runs directly in your Node.js process.
- **TypeScript support:** Full TypeScript definitions included.
- **Cross-platform:** Supports Linux (x86 and arm64), macOS, and Windows (the browser is supported via the separate `@tursodatabase/database-browser` package).

## Installation

```bash
npm install @tursodatabase/database
```

## Getting Started

### In-Memory Database

```javascript
import { connect } from '@tursodatabase/database';

// Create an in-memory database
const db = await connect(':memory:');

// Create a table
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

// Insert data
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
await insert.run('Alice', 'alice@example.com');
await insert.run('Bob', 'bob@example.com');

// Query data
const users = await db.prepare('SELECT * FROM users').all();
console.log(users);
// Output: [
//   { id: 1, name: 'Alice', email: 'alice@example.com' },
//   { id: 2, name: 'Bob', email: 'bob@example.com' }
// ]
```

### File-Based Database

```javascript
import { connect } from '@tursodatabase/database';

// Create or open a database file
const db = await connect('my-database.db');

// Create a table
await db.exec(`
  CREATE TABLE IF NOT EXISTS posts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    content TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )
`);

// Insert a post
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
const result = await insertPost.run('Hello World', 'This is my first blog post!');

console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
```

### Transactions

```javascript
import { connect } from '@tursodatabase/database';

const db = await connect('transactions.db');

// Using transactions for atomic operations
const transaction = db.transaction(async (users) => {
  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  for (const user of users) {
    await insert.run(user.name, user.email);
  }
});

// Execute transaction
await transaction([
  { name: 'Alice', email: 'alice@example.com' },
  { name: 'Bob', email: 'bob@example.com' }
]);
```

## API Reference

For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).

## Related Packages

* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud; see the sketch after this list.
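
A minimal sketch of pairing this package with the sync engine, based on the `connect` options and `Database` methods (`sync`, `stats`, `close`) defined by the sync packages in this commit; the environment variable names are placeholders:

```javascript
import { connect } from '@tursodatabase/sync';

// Open a local database file linked to a Turso Cloud database.
const db = await connect({
  path: 'local.db',                        // local database file
  url: process.env.TURSO_DATABASE_URL,     // remote database URL
  authToken: process.env.TURSO_AUTH_TOKEN, // optional bearer token
});

await db.sync();               // push local changes and pull remote ones
console.log(await db.stats()); // WAL sizes, last push/pull times
await db.close();
```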

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)
@@ -8,13 +8,15 @@ export declare class Database {
    * # Arguments
    * * `path` - The path to the database file.
    */
-  constructor(path: string)
+  constructor(path: string, opts?: DatabaseOpts | undefined | null)
   /** Returns whether the database is in memory-only mode. */
   get memory(): boolean
   /** Returns the path of the database file. */
   get path(): string
   /** Returns whether the database connection is open. */
   get open(): boolean
   /**
-   * Executes a batch of SQL statements.
+   * Executes a batch of SQL statements on the main thread
    *
    * # Arguments
    *
@@ -22,7 +24,17 @@ export declare class Database {
    *
    * # Returns
    */
-  batch(sql: string): void
+  batchSync(sql: string): void
+  /**
+   * Executes a batch of SQL statements outside of the main thread
+   *
+   * # Arguments
+   *
+   * * `sql` - The SQL statements to execute.
+   *
+   * # Returns
+   */
+  batchAsync(sql: string): Promise<void>
   /**
    * Prepares a statement for execution.
    *
@@ -105,10 +117,15 @@ export declare class Statement {
    */
   bindAt(index: number, value: unknown): void
   /**
-   * Step the statement and return result code:
+   * Step the statement and return result code (executed on the main thread):
    * 1 = Row available, 2 = Done, 3 = I/O needed
    */
-  step(): number
+  stepSync(): number
+  /**
+   * Step the statement and return result code (executed on the background thread):
+   * 1 = Row available, 2 = Done, 3 = I/O needed
+   */
+  stepAsync(): Promise<number>
   /** Get the current row data according to the presentation mode */
   row(): unknown
   /** Sets the presentation mode to raw. */
@@ -124,31 +141,32 @@ export declare class Statement {
    */
   safeIntegers(toggle?: boolean | undefined | null): void
   /** Get column information for the statement */
-  columns(): unknown[]
+  columns(): Promise<any>
   /** Finalizes the statement. */
   finalize(): void
 }

 export interface DatabaseOpts {
   tracing?: string
 }
 export declare class GeneratorHolder {
-  resume(error?: string | undefined | null): number
-  take(): GeneratorResponse | null
+  resumeSync(error?: string | undefined | null): GeneratorResponse
+  resumeAsync(error?: string | undefined | null): Promise<unknown>
 }

 export declare class JsDataCompletion {
   poison(err: string): void
   status(value: number): void
-  push(value: Buffer): void
+  pushBuffer(value: Buffer): void
+  pushTransform(values: Array<DatabaseRowTransformResultJs>): void
   done(): void
 }

 export declare class JsDataPollResult {

 }

 export declare class JsProtocolIo {
-  takeRequest(): JsProtocolRequestData | null
+  takeRequest(): JsProtocolRequestBytes | null
 }

-export declare class JsProtocolRequestData {
+export declare class JsProtocolRequestBytes {
   request(): JsProtocolRequest
   completion(): JsDataCompletion
 }
@@ -159,13 +177,14 @@ export declare class SyncEngine {
   ioLoopSync(): void
   /** Runs the I/O loop asynchronously, returning a Promise. */
   ioLoopAsync(): Promise<void>
-  protocolIo(): JsProtocolRequestData | null
+  protocolIo(): JsProtocolRequestBytes | null
   sync(): GeneratorHolder
   push(): GeneratorHolder
   stats(): GeneratorHolder
   pull(): GeneratorHolder
+  checkpoint(): GeneratorHolder
   open(): Database
   close(): void
 }

 export declare const enum DatabaseChangeTypeJs {
@@ -193,21 +212,29 @@ export interface DatabaseRowStatementJs {
   values: Array<any>
 }

+export type DatabaseRowTransformResultJs =
+  | { type: 'Keep' }
+  | { type: 'Skip' }
+  | { type: 'Rewrite', stmt: DatabaseRowStatementJs }
+
 export type GeneratorResponse =
-  | { type: 'SyncEngineStats', operations: number, wal: number }
   | { type: 'IO' }
   | { type: 'Done' }
+  | { type: 'SyncEngineStats', operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime?: number }

 export type JsProtocolRequest =
   | { type: 'Http', method: string, path: string, body?: Array<number>, headers: Array<[string, string]> }
   | { type: 'FullRead', path: string }
   | { type: 'FullWrite', path: string, content: Array<number> }
+  | { type: 'Transform', mutations: Array<DatabaseRowMutationJs> }

 export interface SyncEngineOpts {
   path: string
   clientName?: string
   walPullBatchSize?: number
-  enableTracing?: string
+  tracing?: string
   tablesIgnore?: Array<string>
   transform?: (arg: DatabaseRowMutationJs) => DatabaseRowStatementJs | null
+  useTransform: boolean
+  protocolVersion?: SyncEngineProtocolVersion
 }
520	bindings/javascript/sync/packages/native/index.js	Normal file
@@ -0,0 +1,520 @@
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */

import { createRequire } from 'node:module'
const require = createRequire(import.meta.url)
const __dirname = new URL('.', import.meta.url).pathname

const { readFileSync } = require('node:fs')
let nativeBinding = null
const loadErrors = []

const isMusl = () => {
  let musl = false
  if (process.platform === 'linux') {
    musl = isMuslFromFilesystem()
    if (musl === null) {
      musl = isMuslFromReport()
    }
    if (musl === null) {
      musl = isMuslFromChildProcess()
    }
  }
  return musl
}

const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')

const isMuslFromFilesystem = () => {
  try {
    return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
  } catch {
    return null
  }
}

const isMuslFromReport = () => {
  let report = null
  if (typeof process.report?.getReport === 'function') {
    process.report.excludeNetwork = true
    report = process.report.getReport()
  }
  if (!report) {
    return null
  }
  if (report.header && report.header.glibcVersionRuntime) {
    return false
  }
  if (Array.isArray(report.sharedObjects)) {
    if (report.sharedObjects.some(isFileMusl)) {
      return true
    }
  }
  return false
}

const isMuslFromChildProcess = () => {
  try {
    return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
  } catch (e) {
    // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
    return false
  }
}

function requireNative() {
  if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
    try {
      nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
    } catch (err) {
      loadErrors.push(err)
    }
  } else if (process.platform === 'android') {
    if (process.arch === 'arm64') {
      try {
        return require('./sync.android-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-android-arm64')
        const bindingPackageVersion = require('@tursodatabase/sync-android-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./sync.android-arm-eabi.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-android-arm-eabi')
        const bindingPackageVersion = require('@tursodatabase/sync-android-arm-eabi/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
    }
  } else if (process.platform === 'win32') {
    if (process.arch === 'x64') {
      try {
        return require('./sync.win32-x64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-win32-x64-msvc')
        const bindingPackageVersion = require('@tursodatabase/sync-win32-x64-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'ia32') {
      try {
        return require('./sync.win32-ia32-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-win32-ia32-msvc')
        const bindingPackageVersion = require('@tursodatabase/sync-win32-ia32-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./sync.win32-arm64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-win32-arm64-msvc')
        const bindingPackageVersion = require('@tursodatabase/sync-win32-arm64-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
    }
  } else if (process.platform === 'darwin') {
    try {
      return require('./sync.darwin-universal.node')
    } catch (e) {
      loadErrors.push(e)
    }
    try {
      const binding = require('@tursodatabase/sync-darwin-universal')
      const bindingPackageVersion = require('@tursodatabase/sync-darwin-universal/package.json').version
      if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
        throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
      loadErrors.push(e)
    }
    if (process.arch === 'x64') {
      try {
        return require('./sync.darwin-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-darwin-x64')
        const bindingPackageVersion = require('@tursodatabase/sync-darwin-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./sync.darwin-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-darwin-arm64')
        const bindingPackageVersion = require('@tursodatabase/sync-darwin-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
    }
  } else if (process.platform === 'freebsd') {
    if (process.arch === 'x64') {
      try {
        return require('./sync.freebsd-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-freebsd-x64')
        const bindingPackageVersion = require('@tursodatabase/sync-freebsd-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./sync.freebsd-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-freebsd-arm64')
        const bindingPackageVersion = require('@tursodatabase/sync-freebsd-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
    }
  } else if (process.platform === 'linux') {
    if (process.arch === 'x64') {
      if (isMusl()) {
        try {
          return require('./sync.linux-x64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-x64-musl')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-x64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-x64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-x64-gnu')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-x64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm64') {
      if (isMusl()) {
        try {
          return require('./sync.linux-arm64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm64-musl')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-arm64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm64-gnu')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm') {
      if (isMusl()) {
        try {
          return require('./sync.linux-arm-musleabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm-musleabihf')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm-musleabihf/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-arm-gnueabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm-gnueabihf')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm-gnueabihf/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'riscv64') {
      if (isMusl()) {
        try {
          return require('./sync.linux-riscv64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-riscv64-musl')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-riscv64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-riscv64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-riscv64-gnu')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-riscv64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'ppc64') {
      try {
        return require('./sync.linux-ppc64-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-linux-ppc64-gnu')
        const bindingPackageVersion = require('@tursodatabase/sync-linux-ppc64-gnu/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 's390x') {
      try {
        return require('./sync.linux-s390x-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-linux-s390x-gnu')
        const bindingPackageVersion = require('@tursodatabase/sync-linux-s390x-gnu/package.json').version
        if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
    }
  } else if (process.platform === 'openharmony') {
    if (process.arch === 'arm64') {
      try {
        return require('./sync.openharmony-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-openharmony-arm64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-openharmony-arm64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./sync.openharmony-x64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-openharmony-x64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-openharmony-x64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm') {
|
||||
try {
|
||||
return require('./sync.openharmony-arm.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-openharmony-arm')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-openharmony-arm/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`))
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
|
||||
}
|
||||
}
|
||||
|
||||
nativeBinding = requireNative()
|
||||
|
||||
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
|
||||
try {
|
||||
nativeBinding = require('./sync.wasi.cjs')
|
||||
} catch (err) {
|
||||
if (process.env.NAPI_RS_FORCE_WASI) {
|
||||
loadErrors.push(err)
|
||||
}
|
||||
}
|
||||
if (!nativeBinding) {
|
||||
try {
|
||||
nativeBinding = require('@tursodatabase/sync-wasm32-wasi')
|
||||
} catch (err) {
|
||||
if (process.env.NAPI_RS_FORCE_WASI) {
|
||||
loadErrors.push(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!nativeBinding) {
|
||||
if (loadErrors.length > 0) {
|
||||
throw new Error(
|
||||
`Cannot find native binding. ` +
|
||||
`npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
|
||||
'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
|
||||
{ cause: loadErrors }
|
||||
)
|
||||
}
|
||||
throw new Error(`Failed to load native binding`)
|
||||
}
|
||||
|
||||
const { Database, Statement, GeneratorHolder, JsDataCompletion, JsProtocolIo, JsProtocolRequestBytes, SyncEngine, DatabaseChangeTypeJs, SyncEngineProtocolVersion } = nativeBinding
|
||||
export { Database }
|
||||
export { Statement }
|
||||
export { GeneratorHolder }
|
||||
export { JsDataCompletion }
|
||||
export { JsProtocolIo }
|
||||
export { JsProtocolRequestBytes }
|
||||
export { SyncEngine }
|
||||
export { DatabaseChangeTypeJs }
|
||||
export { SyncEngineProtocolVersion }
|
||||
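Note: the loader above is napi-rs boilerplate. For each platform/arch pair it first tries the locally built `.node` artifact, then the published per-target npm package, and only enforces the baked-in version (`0.1.5` here) when `NAPI_RS_ENFORCE_VERSION_CHECK` is set. A minimal sketch of the pattern, with `loadFirst` and `candidates` as illustrative names that are not part of the generated file:

// Minimal sketch of the fallback strategy used above; `loadFirst` and
// `candidates` are illustrative names, not part of the generated file.
function loadFirst(candidates: string[], loadErrors: Error[]): unknown {
  for (const name of candidates) {
    try {
      return require(name) // first candidate that loads wins
    } catch (e) {
      loadErrors.push(e as Error) // remember the failure; later candidates may still succeed
    }
  }
  return undefined
}

// e.g. on glibc Linux x64:
// loadFirst(['./sync.linux-x64-gnu.node', '@tursodatabase/sync-linux-x64-gnu'], loadErrors)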
53
bindings/javascript/sync/packages/native/package.json
Normal file
@@ -0,0 +1,53 @@
{
  "name": "@tursodatabase/sync",
  "version": "0.2.0-pre.1",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "license": "MIT",
  "module": "./dist/promise.js",
  "main": "./dist/promise.js",
  "type": "module",
  "exports": {
    ".": "./dist/promise.js",
    "./compat": "./dist/compat.js"
  },
  "files": [
    "index.js",
    "dist/**",
    "README.md"
  ],
  "packageManager": "yarn@4.9.2",
  "devDependencies": {
    "@napi-rs/cli": "^3.1.5",
    "@types/node": "^24.3.1",
    "typescript": "^5.9.2",
    "vitest": "^3.2.4"
  },
  "scripts": {
    "napi-build": "napi build --platform --release --esm --manifest-path ../../Cargo.toml --output-dir .",
    "napi-dirs": "napi create-npm-dirs",
    "napi-artifacts": "napi artifacts --output-dir .",
    "tsc-build": "npm exec tsc",
    "build": "npm run napi-build && npm run tsc-build",
    "test": "VITE_TURSO_DB_URL=http://b--a--a.localhost:10000 vitest --run",
    "prepublishOnly": "npm run napi-dirs && npm run napi-artifacts && napi prepublish -t npm"
  },
  "napi": {
    "binaryName": "sync",
    "targets": [
      "x86_64-unknown-linux-gnu",
      "x86_64-pc-windows-msvc",
      "universal-apple-darwin",
      "aarch64-unknown-linux-gnu"
    ]
  },
  "dependencies": {
    "@tursodatabase/database-common": "^0.2.0-pre.1",
    "@tursodatabase/sync-common": "^0.2.0-pre.1"
  },
  "imports": {
    "#index": "./index.js"
  }
}
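Note: the `exports` map above exposes the promise API as the package root and a second entry point under `./compat`. A hedged consumer sketch (the remote URL below is a placeholder, not a real endpoint):

// Hedged consumer sketch; the URL is a placeholder.
import { connect } from '@tursodatabase/sync'   // resolves to ./dist/promise.js
// import { connect } from '@tursodatabase/sync/compat'  // resolves to ./dist/compat.js

const db = await connect({ path: 'local.db', url: 'https://example.turso.io' })
await db.sync()
await db.close()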
288
bindings/javascript/sync/packages/native/promise.test.ts
Normal file
@@ -0,0 +1,288 @@
import { unlinkSync } from "node:fs";
import { expect, test } from 'vitest'
import { connect, DatabaseRowMutation, DatabaseRowTransformResult } from './promise.js'

const localeCompare = (a, b) => a.x.localeCompare(b.x);

test('select-after-push', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
        await db.exec("DELETE FROM t");
        await db.push();
        await db.close();
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("INSERT INTO t VALUES (1), (2), (3)");
        await db.push();
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        const rows = await db.prepare('SELECT * FROM t').all();
        expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }])
    }
})

test('select-without-push', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
        await db.exec("DELETE FROM t");
        await db.push();
        await db.close();
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("INSERT INTO t VALUES (1), (2), (3)");
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        const rows = await db.prepare('SELECT * FROM t').all();
        expect(rows).toEqual([])
    }
})

test('merge-non-overlapping-keys', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2')");

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO q VALUES ('k3', 'value3'), ('k4', 'value4'), ('k5', 'value5')");

    await Promise.all([db1.push(), db2.push()]);
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM q').all();
    const rows2 = await db2.prepare('SELECT * FROM q').all();
    const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value3' }, { x: 'k4', y: 'value4' }, { x: 'k5', y: 'value5' }];
    expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
    expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

    await db2.push();
    await db1.push();
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM q').all();
    const rows2 = await db2.prepare('SELECT * FROM q').all();
    const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value5' }, { x: 'k4', y: 'value4' }];
    expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
    expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins-with-delete', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
    await db1.exec("DELETE FROM q")

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

    await db2.push();
    await db1.push();
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM q').all();
    const rows2 = await db2.prepare('SELECT * FROM q').all();
    const expected = [{ x: 'k3', y: 'value5' }];
    expect(rows1).toEqual(expected)
    expect(rows2).toEqual(expected)
})

test('constraint-conflict', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS u(x TEXT PRIMARY KEY, y UNIQUE)");
        await db.exec("DELETE FROM u");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO u VALUES ('k1', 'value1')");

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO u VALUES ('k2', 'value1')");

    await db1.push();
    await expect(async () => await db2.push()).rejects.toThrow('SQLite error: UNIQUE constraint failed: u.y');
})

test('checkpoint', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    for (let i = 0; i < 1000; i++) {
        await db1.exec(`INSERT INTO q VALUES ('k${i}', 'v${i}')`);
    }
    expect((await db1.stats()).mainWal).toBeGreaterThan(4096 * 1000);
    await db1.checkpoint();
    expect((await db1.stats()).mainWal).toBe(0);
    let revertWal = (await db1.stats()).revertWal;
    expect(revertWal).toBeLessThan(4096 * 1000 / 100);

    for (let i = 0; i < 1000; i++) {
        await db1.exec(`UPDATE q SET y = 'u${i}' WHERE x = 'k${i}'`);
    }
    await db1.checkpoint();
    expect((await db1.stats()).revertWal).toBe(revertWal);
})

test('persistence', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const path = `test-${(Math.random() * 10000) | 0}.db`;
    try {
        {
            const db1 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            await db1.exec(`INSERT INTO q VALUES ('k1', 'v1')`);
            await db1.exec(`INSERT INTO q VALUES ('k2', 'v2')`);
            await db1.close();
        }

        {
            const db2 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            await db2.exec(`INSERT INTO q VALUES ('k3', 'v3')`);
            await db2.exec(`INSERT INTO q VALUES ('k4', 'v4')`);
            const rows = await db2.prepare('SELECT * FROM q').all();
            const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
            expect(rows).toEqual(expected)
            await db2.close();
        }

        {
            const db3 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            await db3.push();
            await db3.close();
        }

        {
            const db4 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            const rows = await db4.prepare('SELECT * FROM q').all();
            const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
            expect(rows).toEqual(expected)
            await db4.close();
        }
    } finally {
        unlinkSync(path);
        unlinkSync(`${path}-wal`);
        unlinkSync(`${path}-info`);
        unlinkSync(`${path}-changes`);
        try { unlinkSync(`${path}-revert`) } catch (e) { }
    }
})

test('transform', async () => {
    {
        const db = await connect({
            path: ':memory:',
            url: process.env.VITE_TURSO_DB_URL,
        });
        await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
        await db.exec("DELETE FROM counter");
        await db.exec("INSERT INTO counter VALUES ('1', 0)")
        await db.push();
        await db.close();
    }
    const transform = (m: DatabaseRowMutation) => ({
        operation: 'rewrite',
        stmt: {
            sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
            values: [m.after.value - m.before.value, m.after.key]
        }
    } as DatabaseRowTransformResult);
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

    await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
    await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");

    await Promise.all([db1.push(), db2.push()]);
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM counter').all();
    const rows2 = await db2.prepare('SELECT * FROM counter').all();
    expect(rows1).toEqual([{ key: '1', value: 2 }]);
    expect(rows2).toEqual([{ key: '1', value: 2 }]);
})

test('transform-many', async () => {
    {
        const db = await connect({
            path: ':memory:',
            url: process.env.VITE_TURSO_DB_URL,
        });
        await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
        await db.exec("DELETE FROM counter");
        await db.exec("INSERT INTO counter VALUES ('1', 0)")
        await db.push();
        await db.close();
    }
    const transform = (m: DatabaseRowMutation) => ({
        operation: 'rewrite',
        stmt: {
            sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
            values: [m.after.value - m.before.value, m.after.key]
        }
    } as DatabaseRowTransformResult);
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

    for (let i = 0; i < 1002; i++) {
        await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
    }
    for (let i = 0; i < 1001; i++) {
        await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
    }

    let start = performance.now();
    await Promise.all([db1.push(), db2.push()]);
    console.info('push', performance.now() - start);

    start = performance.now();
    await Promise.all([db1.pull(), db2.pull()]);
    console.info('pull', performance.now() - start);

    const rows1 = await db1.prepare('SELECT * FROM counter').all();
    const rows2 = await db2.prepare('SELECT * FROM counter').all();
    expect(rows1).toEqual([{ key: '1', value: 1001 + 1002 }]);
    expect(rows2).toEqual([{ key: '1', value: 1001 + 1002 }]);
})
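Note: the suite above runs against a live sync endpoint supplied through VITE_TURSO_DB_URL (see the "test" script in package.json). The core loop every test exercises, distilled under that same assumption:

// Distilled from the tests above; assumes VITE_TURSO_DB_URL points at a
// sync-capable server, as the package.json "test" script arranges.
import { connect } from './promise.js'

const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL })
await db.exec("CREATE TABLE IF NOT EXISTS t(x)")
await db.push()   // publish local changes to the remote
await db.pull()   // fetch and apply remote changes locally
await db.close()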
104
bindings/javascript/sync/packages/native/promise.ts
Normal file
@@ -0,0 +1,104 @@
import { DatabasePromise, DatabaseOpts, NativeDatabase } from "@tursodatabase/database-common"
import { ProtocolIo, run, SyncOpts, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult } from "@tursodatabase/sync-common";
import { Database as NativeDB, SyncEngine } from "#index";
import { promises } from "node:fs";

let NodeIO: ProtocolIo = {
    async read(path: string): Promise<Buffer | Uint8Array | null> {
        try {
            return await promises.readFile(path);
        } catch (error) {
            if (error.code === 'ENOENT') {
                return null;
            }
            throw error;
        }
    },
    async write(path: string, data: Buffer | Uint8Array): Promise<void> {
        const unix = Math.floor(Date.now() / 1000);
        const nonce = Math.floor(Math.random() * 1000000000);
        const tmp = `${path}.tmp.${unix}.${nonce}`;
        await promises.writeFile(tmp, new Uint8Array(data));
        try {
            await promises.rename(tmp, path);
        } catch (err) {
            await promises.unlink(tmp);
            throw err;
        }
    }
};

function memoryIO(): ProtocolIo {
    let values = new Map();
    return {
        async read(path: string): Promise<Buffer | Uint8Array | null> {
            return values.get(path);
        },
        async write(path: string, data: Buffer | Uint8Array): Promise<void> {
            values.set(path, data);
        }
    }
};

class Database extends DatabasePromise {
    runOpts: RunOpts;
    engine: any;
    io: ProtocolIo;
    constructor(db: NativeDatabase, io: ProtocolIo, runOpts: RunOpts, engine: any, opts: DatabaseOpts = {}) {
        super(db, opts)
        this.runOpts = runOpts;
        this.engine = engine;
        this.io = io;
    }
    async sync() {
        await run(this.runOpts, this.io, this.engine, this.engine.sync());
    }
    async pull() {
        await run(this.runOpts, this.io, this.engine, this.engine.pull());
    }
    async push() {
        await run(this.runOpts, this.io, this.engine, this.engine.push());
    }
    async checkpoint() {
        await run(this.runOpts, this.io, this.engine, this.engine.checkpoint());
    }
    async stats(): Promise<{ operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }> {
        return (await run(this.runOpts, this.io, this.engine, this.engine.stats()));
    }
    override async close(): Promise<void> {
        this.engine.close();
    }
}

/**
 * Creates a new database connection asynchronously.
 *
 * @param {SyncOpts} opts - Sync options, including the database path, the remote URL, and optional auth and transform settings.
 * @returns {Promise<Database>} - A promise that resolves to a Database instance.
 */
async function connect(opts: SyncOpts): Promise<Database> {
    const engine = new SyncEngine({
        path: opts.path,
        clientName: opts.clientName,
        tablesIgnore: opts.tablesIgnore,
        useTransform: opts.transform != null,
        tracing: opts.tracing,
        protocolVersion: 1
    });
    const runOpts: RunOpts = {
        url: opts.url,
        headers: {
            ...(opts.authToken != null && { "Authorization": `Bearer ${opts.authToken}` }),
            ...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
        },
        preemptionMs: 1,
        transform: opts.transform,
    };
    let io = opts.path == ':memory:' ? memoryIO() : NodeIO;
    await run(runOpts, io, engine, engine.init());

    const nativeDb = engine.open();
    return new Database(nativeDb as any, io, runOpts, engine, {});
}

export { connect, Database, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
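Note: a hedged usage sketch for connect(). The transform callback rewrites each replicated row mutation into an idempotent statement, which is how the counter tests above merge concurrent increments instead of taking last-write-wins; the URL here is a placeholder.

// Hedged sketch; the URL is a placeholder and the auth token is optional.
import { connect, DatabaseRowMutation, DatabaseRowTransformResult } from '@tursodatabase/sync'

const db = await connect({
    path: 'app.db',
    url: 'https://example.turso.io',          // placeholder endpoint
    authToken: process.env.TURSO_AUTH_TOKEN,  // sent as an Authorization: Bearer header
    transform: (m: DatabaseRowMutation) => ({
        operation: 'rewrite',
        stmt: {
            sql: 'UPDATE counter SET value = value + ? WHERE key = ?',
            values: [m.after.value - m.before.value, m.after.key],
        },
    } as DatabaseRowTransformResult),
})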
21
bindings/javascript/sync/packages/native/tsconfig.json
Normal file
@@ -0,0 +1,21 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "nodenext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020"
    ],
    "paths": {
      "#index": [
        "./index.d.ts"
      ]
    }
  },
  "include": [
    "*"
  ]
}
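Note: the "paths" entry mirrors the "imports" field in package.json, so the `#index` specifier resolves to the generated binding both at type-check time and at run time:

// How source files in this package address the generated binding:
import { Database as NativeDB, SyncEngine } from "#index"
// run time:   package.json "imports" -> ./index.js
// type check: tsconfig "paths"       -> ./index.d.ts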
102
bindings/javascript/sync/src/generator.rs
Normal file
@@ -0,0 +1,102 @@
use napi::{bindgen_prelude::AsyncTask, Env, Task};
use napi_derive::napi;
use std::{
    future::Future,
    sync::{Arc, Mutex},
};

use turso_sync_engine::types::ProtocolCommand;

pub const GENERATOR_RESUME_IO: u32 = 0;
pub const GENERATOR_RESUME_DONE: u32 = 1;

pub trait Generator {
    fn resume(&mut self, result: Option<String>) -> napi::Result<GeneratorResponse>;
}

impl<F: Future<Output = turso_sync_engine::Result<()>>> Generator
    for genawaiter::sync::Gen<ProtocolCommand, turso_sync_engine::Result<()>, F>
{
    fn resume(&mut self, error: Option<String>) -> napi::Result<GeneratorResponse> {
        let result = match error {
            Some(err) => Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
                format!("JsProtocolIo error: {err}"),
            )),
            None => Ok(()),
        };
        match self.resume_with(result) {
            genawaiter::GeneratorState::Yielded(ProtocolCommand::IO) => Ok(GeneratorResponse::IO),
            genawaiter::GeneratorState::Complete(Ok(())) => Ok(GeneratorResponse::Done),
            genawaiter::GeneratorState::Complete(Err(err)) => Err(napi::Error::new(
                napi::Status::GenericFailure,
                format!("sync engine operation failed: {err}"),
            )),
        }
    }
}

#[napi(discriminant = "type")]
pub enum GeneratorResponse {
    IO,
    Done,
    SyncEngineStats {
        operations: i64,
        main_wal: i64,
        revert_wal: i64,
        last_pull_unix_time: i64,
        last_push_unix_time: Option<i64>,
    },
}

#[napi]
#[derive(Clone)]
pub struct GeneratorHolder {
    pub(crate) generator: Arc<Mutex<dyn Generator>>,
    pub(crate) response: Arc<Mutex<Option<GeneratorResponse>>>,
}

pub struct ResumeTask {
    holder: GeneratorHolder,
    error: Option<String>,
}

unsafe impl Send for ResumeTask {}

impl Task for ResumeTask {
    type Output = GeneratorResponse;
    type JsValue = GeneratorResponse;

    fn compute(&mut self) -> napi::Result<Self::Output> {
        resume_sync(&self.holder, self.error.take())
    }

    fn resolve(&mut self, _: Env, output: Self::Output) -> napi::Result<Self::JsValue> {
        Ok(output)
    }
}

fn resume_sync(holder: &GeneratorHolder, error: Option<String>) -> napi::Result<GeneratorResponse> {
    let result = holder.generator.lock().unwrap().resume(error)?;
    if let GeneratorResponse::Done = result {
        let response = holder.response.lock().unwrap().take();
        Ok(response.unwrap_or(GeneratorResponse::Done))
    } else {
        Ok(result)
    }
}

#[napi]
impl GeneratorHolder {
    #[napi]
    pub fn resume_sync(&self, error: Option<String>) -> napi::Result<GeneratorResponse> {
        resume_sync(self, error)
    }

    #[napi]
    pub fn resume_async(&self, error: Option<String>) -> napi::Result<AsyncTask<ResumeTask>> {
        Ok(AsyncTask::new(ResumeTask {
            holder: self.clone(),
            error,
        }))
    }
}
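Note: a hedged sketch of how the JS side is expected to drive a GeneratorHolder. The real driver is run() in @tursodatabase/sync-common; napi-rs exposes resume_async as resumeAsync, and the response objects carry the "type" discriminant declared above. Names here follow that convention but are assumptions, not verbatim API.

// Hedged driver sketch, assuming napi-rs camelCase bindings for the
// #[napi] methods above; not the actual run() implementation.
async function drive(engine: any, holder: any): Promise<any> {
  for (;;) {
    const response = await holder.resumeAsync(null) // resume the Rust generator
    if (response.type === 'Done' || response.type === 'SyncEngineStats') {
      return response // generator completed; stats may replace plain Done
    }
    // response.type === 'IO': service pending protocol requests, then resume again
    await engine.ioLoopAsync()
  }
}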
@@ -7,7 +7,15 @@ use std::{

use napi::bindgen_prelude::*;
use napi_derive::napi;
use turso_sync_engine::protocol_io::{DataCompletion, DataPollResult, ProtocolIO};
use turso_sync_engine::{
    protocol_io::{DataCompletion, DataPollResult, ProtocolIO},
    types::{DatabaseRowTransformResult, DatabaseStatementReplay},
};

use crate::{
    core_change_type_to_js, core_values_map_to_js, js_value_to_core, DatabaseRowMutationJs,
    DatabaseRowTransformResultJs,
};

#[napi]
pub enum JsProtocolRequest {
@@ -24,15 +32,34 @@ pub enum JsProtocolRequest {
        path: String,
        content: Vec<u8>,
    },
    Transform {
        mutations: Vec<DatabaseRowMutationJs>,
    },
}

#[derive(Clone)]
#[napi]
pub struct JsDataCompletion(Arc<Mutex<JsDataCompletionInner>>);

pub struct JsBytesPollResult(Buffer);

impl DataPollResult<u8> for JsBytesPollResult {
    fn data(&self) -> &[u8] {
        &self.0
    }
}
pub struct JsTransformPollResult(Vec<DatabaseRowTransformResult>);

impl DataPollResult<DatabaseRowTransformResult> for JsTransformPollResult {
    fn data(&self) -> &[DatabaseRowTransformResult] {
        &self.0
    }
}

struct JsDataCompletionInner {
    status: Option<u16>,
    chunks: VecDeque<Buffer>,
    transformed: VecDeque<DatabaseRowTransformResult>,
    finished: bool,
    err: Option<String>,
}
@@ -49,8 +76,8 @@ impl JsDataCompletion {
    }
}

impl DataCompletion for JsDataCompletion {
    type DataPollResult = JsDataPollResult;
impl DataCompletion<u8> for JsDataCompletion {
    type DataPollResult = JsBytesPollResult;

    fn status(&self) -> turso_sync_engine::Result<Option<u16>> {
        let inner = self.inner()?;
@@ -60,7 +87,31 @@ impl DataCompletion for JsDataCompletion {
    fn poll_data(&self) -> turso_sync_engine::Result<Option<Self::DataPollResult>> {
        let mut inner = self.inner()?;
        let chunk = inner.chunks.pop_front();
        Ok(chunk.map(JsDataPollResult))
        Ok(chunk.map(JsBytesPollResult))
    }

    fn is_done(&self) -> turso_sync_engine::Result<bool> {
        let inner = self.inner()?;
        Ok(inner.finished)
    }
}

impl DataCompletion<DatabaseRowTransformResult> for JsDataCompletion {
    type DataPollResult = JsTransformPollResult;

    fn status(&self) -> turso_sync_engine::Result<Option<u16>> {
        let inner = self.inner()?;
        Ok(inner.status)
    }

    fn poll_data(&self) -> turso_sync_engine::Result<Option<Self::DataPollResult>> {
        let mut inner = self.inner()?;
        let chunk = inner.transformed.drain(..).collect::<Vec<_>>();
        if chunk.is_empty() {
            Ok(None)
        } else {
            Ok(Some(JsTransformPollResult(chunk)))
        }
    }

    fn is_done(&self) -> turso_sync_engine::Result<bool> {
@@ -84,11 +135,28 @@ impl JsDataCompletion {
    }

    #[napi]
    pub fn push(&self, value: Buffer) {
    pub fn push_buffer(&self, value: Buffer) {
        let mut completion = self.0.lock().unwrap();
        completion.chunks.push_back(value);
    }

    #[napi]
    pub fn push_transform(&self, values: Vec<DatabaseRowTransformResultJs>) {
        let mut completion = self.0.lock().unwrap();
        for value in values {
            completion.transformed.push_back(match value {
                DatabaseRowTransformResultJs::Keep => DatabaseRowTransformResult::Keep,
                DatabaseRowTransformResultJs::Skip => DatabaseRowTransformResult::Skip,
                DatabaseRowTransformResultJs::Rewrite { stmt } => {
                    DatabaseRowTransformResult::Rewrite(DatabaseStatementReplay {
                        sql: stmt.sql,
                        values: stmt.values.into_iter().map(js_value_to_core).collect(),
                    })
                }
            });
        }
    }

    #[napi]
    pub fn done(&self) {
        let mut completion = self.0.lock().unwrap();
@@ -97,22 +165,13 @@ impl JsDataCompletion {
}

#[napi]
pub struct JsDataPollResult(Buffer);

impl DataPollResult for JsDataPollResult {
    fn data(&self) -> &[u8] {
        &self.0
    }
}

#[napi]
pub struct JsProtocolRequestData {
pub struct JsProtocolRequestBytes {
    request: Arc<Mutex<Option<JsProtocolRequest>>>,
    completion: JsDataCompletion,
}

#[napi]
impl JsProtocolRequestData {
impl JsProtocolRequestBytes {
    #[napi]
    pub fn request(&self) -> JsProtocolRequest {
        let mut request = self.request.lock().unwrap();
@@ -125,7 +184,9 @@ impl JsProtocolRequestData {
}

impl ProtocolIO for JsProtocolIo {
    type DataCompletion = JsDataCompletion;
    type DataCompletionBytes = JsDataCompletion;
    type DataCompletionTransform = JsDataCompletion;

    fn http(
        &self,
        method: &str,
@@ -144,7 +205,7 @@ impl ProtocolIO for JsProtocolIo {
        }))
    }

    fn full_read(&self, path: &str) -> turso_sync_engine::Result<Self::DataCompletion> {
    fn full_read(&self, path: &str) -> turso_sync_engine::Result<Self::DataCompletionBytes> {
        Ok(self.add_request(JsProtocolRequest::FullRead {
            path: path.to_string(),
        }))
@@ -154,17 +215,37 @@ impl ProtocolIO for JsProtocolIo {
        &self,
        path: &str,
        content: Vec<u8>,
    ) -> turso_sync_engine::Result<Self::DataCompletion> {
    ) -> turso_sync_engine::Result<Self::DataCompletionBytes> {
        Ok(self.add_request(JsProtocolRequest::FullWrite {
            path: path.to_string(),
            content,
        }))
    }

    fn transform(
        &self,
        mutations: Vec<turso_sync_engine::types::DatabaseRowMutation>,
    ) -> turso_sync_engine::Result<Self::DataCompletionTransform> {
        Ok(self.add_request(JsProtocolRequest::Transform {
            mutations: mutations
                .into_iter()
                .map(|mutation| DatabaseRowMutationJs {
                    change_time: mutation.change_time as i64,
                    table_name: mutation.table_name,
                    id: mutation.id,
                    change_type: core_change_type_to_js(mutation.change_type),
                    before: mutation.before.map(core_values_map_to_js),
                    after: mutation.after.map(core_values_map_to_js),
                    updates: mutation.updates.map(core_values_map_to_js),
                })
                .collect(),
        }))
    }
}

#[napi]
pub struct JsProtocolIo {
    requests: Mutex<Vec<JsProtocolRequestData>>,
    requests: Mutex<Vec<JsProtocolRequestBytes>>,
}

impl Default for JsProtocolIo {
@@ -178,13 +259,14 @@ impl Default for JsProtocolIo {
#[napi]
impl JsProtocolIo {
    #[napi]
    pub fn take_request(&self) -> Option<JsProtocolRequestData> {
    pub fn take_request(&self) -> Option<JsProtocolRequestBytes> {
        self.requests.lock().unwrap().pop()
    }

    fn add_request(&self, request: JsProtocolRequest) -> JsDataCompletion {
        let completion = JsDataCompletionInner {
            chunks: VecDeque::new(),
            transformed: VecDeque::new(),
            finished: false,
            err: None,
            status: None,
@@ -192,7 +274,7 @@ impl JsProtocolIo {
        let completion = JsDataCompletion(Arc::new(Mutex::new(completion)));

        let mut requests = self.requests.lock().unwrap();
        requests.push(JsProtocolRequestData {
        requests.push(JsProtocolRequestBytes {
            request: Arc::new(Mutex::new(Some(request))),
            completion: completion.clone(),
        });
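Note: a hedged sketch of how the JS side services one pending protocol request. take_request(), push_buffer() and done() mirror the #[napi] methods above (napi-rs renames snake_case to camelCase), and the request variants follow the JsProtocolRequest enum; how the completion object is handed to JS is not shown in this diff, so the accessor below is hypothetical.

// Hedged sketch; `completion()` is a hypothetical accessor for the
// JsDataCompletion paired with the request in JsProtocolRequestBytes.
async function serviceOne(engine: any, io: { read(p: string): Promise<Uint8Array | null> }) {
  const pending = engine.protocolIo() // Option<JsProtocolRequestBytes>
  if (pending == null) return
  const request = pending.request()   // discriminated by "type"
  if (request.type === 'FullRead') {
    const data = await io.read(request.path)
    if (data != null) pending.completion().pushBuffer(Buffer.from(data))
    pending.completion().done()
  }
  // FullWrite, Http and Transform requests are handled analogously,
  // with Transform answered via pushTransform().
}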
@@ -9,24 +9,18 @@ use std::{
    sync::{Arc, Mutex, OnceLock, RwLock, RwLockReadGuard, RwLockWriteGuard},
};

use napi::{
    bindgen_prelude::{AsyncTask, Either5, Function, FunctionRef, Null},
    Env,
};
use napi::bindgen_prelude::{AsyncTask, Either5, Null};
use napi_derive::napi;
use tracing_subscriber::{filter::LevelFilter, fmt::format::FmtSpan};
use turso_node::IoLoopTask;
use turso_sync_engine::{
    database_sync_engine::{DatabaseSyncEngine, DatabaseSyncEngineOpts},
    types::{
        Coro, DatabaseChangeType, DatabaseRowMutation, DatabaseRowStatement,
        DatabaseSyncEngineProtocolVersion,
    },
    types::{Coro, DatabaseChangeType, DatabaseSyncEngineProtocolVersion},
};

use crate::{
    generator::{GeneratorHolder, GeneratorResponse},
    js_protocol_io::{JsProtocolIo, JsProtocolRequestData},
    js_protocol_io::{JsProtocolIo, JsProtocolRequestBytes},
};

#[napi(object)]
@@ -41,10 +35,10 @@ pub struct SyncEngine {
    wal_pull_batch_size: u32,
    protocol_version: DatabaseSyncEngineProtocolVersion,
    tables_ignore: Vec<String>,
    transform: Option<FunctionRef<DatabaseRowMutationJs, Option<DatabaseRowStatementJs>>>,
    io: Arc<dyn turso_core::IO>,
    protocol: Arc<JsProtocolIo>,
    sync_engine: Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>>,
    use_transform: bool,
    io: Option<Arc<dyn turso_core::IO>>,
    protocol: Option<Arc<JsProtocolIo>>,
    sync_engine: Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>>,
    opened: Arc<Mutex<Option<turso_node::Database>>>,
}

@@ -116,19 +110,27 @@ pub struct DatabaseRowStatementJs {
    pub values: Vec<Either5<Null, i64, f64, String, Vec<u8>>>,
}

#[napi(discriminant = "type")]
#[derive(Debug)]
pub enum DatabaseRowTransformResultJs {
    Keep,
    Skip,
    Rewrite { stmt: DatabaseRowStatementJs },
}

#[napi(object, object_to_js = false)]
pub struct SyncEngineOpts {
    pub path: String,
    pub client_name: Option<String>,
    pub wal_pull_batch_size: Option<u32>,
    pub enable_tracing: Option<String>,
    pub tracing: Option<String>,
    pub tables_ignore: Option<Vec<String>>,
    pub transform: Option<Function<'static, DatabaseRowMutationJs, Option<DatabaseRowStatementJs>>>,
    pub use_transform: bool,
    pub protocol_version: Option<SyncEngineProtocolVersion>,
}

static TRACING_INIT: OnceLock<()> = OnceLock::new();
fn init_tracing(level_filter: LevelFilter) {
pub fn init_tracing(level_filter: LevelFilter) {
    TRACING_INIT.get_or_init(|| {
        tracing_subscriber::fmt()
            .with_ansi(false)
@@ -144,7 +146,7 @@ impl SyncEngine {
    #[napi(constructor)]
    pub fn new(opts: SyncEngineOpts) -> napi::Result<Self> {
        // helpful for local debugging
        match opts.enable_tracing.as_deref() {
        match opts.tracing.as_deref() {
            Some("info") => init_tracing(LevelFilter::INFO),
            Some("debug") => init_tracing(LevelFilter::DEBUG),
            Some("trace") => init_tracing(LevelFilter::TRACE),
@@ -154,23 +156,30 @@ impl SyncEngine {
        let io: Arc<dyn turso_core::IO> = if is_memory {
            Arc::new(turso_core::MemoryIO::new())
        } else {
            Arc::new(turso_core::PlatformIO::new().map_err(|e| {
                napi::Error::new(
                    napi::Status::GenericFailure,
                    format!("Failed to create IO: {e}"),
                )
            })?)
            #[cfg(not(feature = "browser"))]
            {
                Arc::new(turso_core::PlatformIO::new().map_err(|e| {
                    napi::Error::new(
                        napi::Status::GenericFailure,
                        format!("Failed to create IO: {e}"),
                    )
                })?)
            }
            #[cfg(feature = "browser")]
            {
                Arc::new(turso_node::browser::Opfs::new()?)
            }
        };
        Ok(SyncEngine {
            path: opts.path,
            client_name: opts.client_name.unwrap_or("turso-sync-js".to_string()),
            wal_pull_batch_size: opts.wal_pull_batch_size.unwrap_or(100),
            tables_ignore: opts.tables_ignore.unwrap_or_default(),
            transform: opts.transform.map(|x| x.create_ref().unwrap()),
            use_transform: opts.use_transform,
            #[allow(clippy::arc_with_non_send_sync)]
            sync_engine: Arc::new(RwLock::new(None)),
            io,
            protocol: Arc::new(JsProtocolIo::default()),
            io: Some(io),
            protocol: Some(Arc::new(JsProtocolIo::default())),
            #[allow(clippy::arc_with_non_send_sync)]
            opened: Arc::new(Mutex::new(None)),
            protocol_version: match opts.protocol_version {
@@ -183,76 +192,41 @@ impl SyncEngine {
    }

    #[napi]
    pub fn init(&mut self, env: Env) -> GeneratorHolder {
        #[allow(clippy::type_complexity)]
        let transform: Option<
            Arc<
                dyn Fn(
                        &Env,
                        DatabaseRowMutation,
                    )
                        -> turso_sync_engine::Result<Option<DatabaseRowStatement>>
                    + 'static,
            >,
        > = match self.transform.take() {
            Some(f) => Some(Arc::new(move |env, mutation| {
                let result = f
                    .borrow_back(env)
                    .unwrap()
                    .call(DatabaseRowMutationJs {
                        change_time: mutation.change_time as i64,
                        table_name: mutation.table_name,
                        id: mutation.id,
                        change_type: core_change_type_to_js(mutation.change_type),
                        before: mutation.before.map(core_values_map_to_js),
                        after: mutation.after.map(core_values_map_to_js),
                        updates: mutation.updates.map(core_values_map_to_js),
                    })
                    .map_err(|e| {
                        turso_sync_engine::errors::Error::DatabaseSyncEngineError(format!(
                            "transform callback failed: {e}"
                        ))
                    })?;
                Ok(result.map(|statement| DatabaseRowStatement {
                    sql: statement.sql,
                    values: statement.values.into_iter().map(js_value_to_core).collect(),
                }))
            })),
            None => None,
        };
    pub fn init(&mut self) -> napi::Result<GeneratorHolder> {
        let opts = DatabaseSyncEngineOpts {
            client_name: self.client_name.clone(),
            wal_pull_batch_size: self.wal_pull_batch_size as u64,
            tables_ignore: self.tables_ignore.clone(),
            transform,
            use_transform: self.use_transform,
            protocol_version_hint: self.protocol_version,
        };

        let protocol = self.protocol.clone();
        let io = self.io()?;
        let protocol = self.protocol()?;
        let sync_engine = self.sync_engine.clone();
        let io = self.io.clone();
        let opened = self.opened.clone();
        let path = self.path.clone();
        let generator = genawaiter::sync::Gen::new(|coro| async move {
            let coro = Coro::new(env, coro);
            let coro = Coro::new((), coro);
            let initialized =
                DatabaseSyncEngine::new(&coro, io.clone(), protocol, &path, opts).await?;
            let connection = initialized.connect_rw(&coro).await?;
            let db = turso_node::Database::create(None, io.clone(), connection, false);
            let db = turso_node::Database::create(None, io.clone(), connection, path);

            *sync_engine.write().unwrap() = Some(initialized);
            *opened.lock().unwrap() = Some(db);
            Ok(())
        });
        GeneratorHolder {
            inner: Box::new(Mutex::new(generator)),
        Ok(GeneratorHolder {
            #[allow(clippy::arc_with_non_send_sync)]
            generator: Arc::new(Mutex::new(generator)),
            response: Arc::new(Mutex::new(None)),
        }
        })
    }

    #[napi]
    pub fn io_loop_sync(&self) -> napi::Result<()> {
        self.io.run_once().map_err(|e| {
        self.io()?.step().map_err(|e| {
            napi::Error::new(napi::Status::GenericFailure, format!("IO error: {e}"))
        })?;
        Ok(())
@@ -260,19 +234,19 @@ impl SyncEngine {

    /// Runs the I/O loop asynchronously, returning a Promise.
    #[napi(ts_return_type = "Promise<void>")]
    pub fn io_loop_async(&self) -> AsyncTask<IoLoopTask> {
        let io = self.io.clone();
        AsyncTask::new(IoLoopTask { io })
    pub fn io_loop_async(&self) -> napi::Result<AsyncTask<IoLoopTask>> {
        let io = self.io()?;
        Ok(AsyncTask::new(IoLoopTask { io }))
    }

    #[napi]
    pub fn protocol_io(&self) -> Option<JsProtocolRequestData> {
        self.protocol.take_request()
    pub fn protocol_io(&self) -> napi::Result<Option<JsProtocolRequestBytes>> {
        Ok(self.protocol()?.take_request())
    }

    #[napi]
    pub fn sync(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn sync(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let mut sync_engine = try_write(sync_engine)?;
            let sync_engine = try_unwrap_mut(&mut sync_engine)?;
            sync_engine.sync(coro).await?;
@@ -281,8 +255,8 @@ impl SyncEngine {
    }

    #[napi]
    pub fn push(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn push(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let sync_engine = try_read(sync_engine)?;
            let sync_engine = try_unwrap(&sync_engine)?;
            sync_engine.push_changes_to_remote(coro).await?;
@@ -291,38 +265,34 @@ impl SyncEngine {
    }

    #[napi]
    pub fn stats(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn stats(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let sync_engine = try_read(sync_engine)?;
            let sync_engine = try_unwrap(&sync_engine)?;
            let changes = sync_engine.stats(coro).await?;
            Ok(Some(GeneratorResponse::SyncEngineStats {
                operations: changes.cdc_operations,
                wal: changes.wal_size,
                main_wal: changes.main_wal_size as i64,
                revert_wal: changes.revert_wal_size as i64,
                last_pull_unix_time: changes.last_pull_unix_time,
                last_push_unix_time: changes.last_push_unix_time,
            }))
        })
    }

    #[napi]
    pub fn pull(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
            let changes = {
                let sync_engine = try_read(sync_engine)?;
                let sync_engine = try_unwrap(&sync_engine)?;
                sync_engine.wait_changes_from_remote(coro).await?
            };
            if let Some(changes) = changes {
                let mut sync_engine = try_write(sync_engine)?;
                let sync_engine = try_unwrap_mut(&mut sync_engine)?;
                sync_engine.apply_changes_from_remote(coro, changes).await?;
            }
    pub fn pull(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let mut sync_engine = try_write(sync_engine)?;
            let sync_engine = try_unwrap_mut(&mut sync_engine)?;
            sync_engine.pull_changes_from_remote(coro).await?;
            Ok(None)
        })
    }

    #[napi]
    pub fn checkpoint(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn checkpoint(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let mut sync_engine = try_write(sync_engine)?;
            let sync_engine = try_unwrap_mut(&mut sync_engine)?;
            sync_engine.checkpoint(coro).await?;
@@ -342,12 +312,38 @@ impl SyncEngine {
        Ok(opened.clone())
    }

    #[napi]
    pub fn close(&mut self) {
        let _ = self.sync_engine.write().unwrap().take();
        let _ = self.opened.lock().unwrap().take().unwrap();
        let _ = self.io.take();
        let _ = self.protocol.take();
    }

    fn io(&self) -> napi::Result<Arc<dyn turso_core::IO>> {
        if self.io.is_none() {
            return Err(napi::Error::new(
                napi::Status::GenericFailure,
                "sync engine was closed",
            ));
        }
        Ok(self.io.as_ref().unwrap().clone())
    }
    fn protocol(&self) -> napi::Result<Arc<JsProtocolIo>> {
        if self.protocol.is_none() {
            return Err(napi::Error::new(
                napi::Status::GenericFailure,
                "sync engine was closed",
            ));
        }
        Ok(self.protocol.as_ref().unwrap().clone())
    }

    fn run(
        &self,
        env: Env,
        f: impl AsyncFnOnce(
            &Coro<Env>,
            &Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>>,
            &Coro<()>,
            &Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>>,
        ) -> turso_sync_engine::Result<Option<GeneratorResponse>>
        + 'static,
    ) -> GeneratorHolder {
@@ -357,21 +353,21 @@ impl SyncEngine {
        let generator = genawaiter::sync::Gen::new({
            let response = response.clone();
            |coro| async move {
                let coro = Coro::new(env, coro);
                let coro = Coro::new((), coro);
                *response.lock().unwrap() = f(&coro, &sync_engine).await?;
                Ok(())
            }
        });
        GeneratorHolder {
            inner: Box::new(Mutex::new(generator)),
            generator: Arc::new(Mutex::new(generator)),
            response,
        }
    }
}

fn try_read(
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>> {
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>> {
    let Ok(sync_engine) = sync_engine.try_read() else {
        let nasty_error = "sync_engine is busy".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
@@ -382,9 +378,8 @@ fn try_read(
}

fn try_write(
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>>
{
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>> {
    let Ok(sync_engine) = sync_engine.try_write() else {
        let nasty_error = "sync_engine is busy".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
@@ -395,8 +390,8 @@ fn try_write(
}

fn try_unwrap<'a>(
    sync_engine: &'a RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<&'a DatabaseSyncEngine<JsProtocolIo, Env>> {
    sync_engine: &'a RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<&'a DatabaseSyncEngine<JsProtocolIo>> {
    let Some(sync_engine) = sync_engine.as_ref() else {
        let error = "sync_engine must be initialized".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
@@ -407,8 +402,8 @@ fn try_unwrap<'a>(
}

fn try_unwrap_mut<'a>(
    sync_engine: &'a mut RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<&'a mut DatabaseSyncEngine<JsProtocolIo, Env>> {
    sync_engine: &'a mut RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<&'a mut DatabaseSyncEngine<JsProtocolIo>> {
    let Some(sync_engine) = sync_engine.as_mut() else {
        let error = "sync_engine must be initialized".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
@@ -1394,13 +1394,22 @@ __metadata:
  languageName: node
  linkType: hard

"@tursodatabase/database-browser-common@npm:^0.2.0-pre.1, @tursodatabase/database-browser-common@workspace:packages/browser-common":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/database-browser-common@workspace:packages/browser-common"
  dependencies:
    typescript: "npm:^5.9.2"
  languageName: unknown
  linkType: soft

"@tursodatabase/database-browser@workspace:packages/browser":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/database-browser@workspace:packages/browser"
  dependencies:
    "@napi-rs/cli": "npm:^3.1.5"
    "@napi-rs/wasm-runtime": "npm:^1.0.3"
    "@tursodatabase/database-common": "npm:^0.1.5-pre.5"
    "@tursodatabase/database-browser-common": "npm:^0.2.0-pre.1"
    "@tursodatabase/database-common": "npm:^0.2.0-pre.1"
    "@vitest/browser": "npm:^3.2.4"
    playwright: "npm:^1.55.0"
    typescript: "npm:^5.9.2"
@@ -1408,7 +1417,7 @@ __metadata:
  languageName: unknown
  linkType: soft

"@tursodatabase/database-common@npm:^0.1.5-pre.5, @tursodatabase/database-common@workspace:packages/common":
"@tursodatabase/database-common@npm:^0.2.0-pre.1, @tursodatabase/database-common@workspace:packages/common":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/database-common@workspace:packages/common"
  dependencies:
@@ -1421,7 +1430,44 @@ __metadata:
  resolution: "@tursodatabase/database@workspace:packages/native"
  dependencies:
    "@napi-rs/cli": "npm:^3.1.5"
    "@tursodatabase/database-common": "npm:^0.1.5-pre.5"
    "@tursodatabase/database-common": "npm:^0.2.0-pre.1"
    "@types/node": "npm:^24.3.1"
    typescript: "npm:^5.9.2"
    vitest: "npm:^3.2.4"
  languageName: unknown
  linkType: soft

"@tursodatabase/sync-browser@workspace:sync/packages/browser":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/sync-browser@workspace:sync/packages/browser"
  dependencies:
    "@napi-rs/cli": "npm:^3.1.5"
    "@napi-rs/wasm-runtime": "npm:^1.0.3"
    "@tursodatabase/database-browser-common": "npm:^0.2.0-pre.1"
    "@tursodatabase/database-common": "npm:^0.2.0-pre.1"
    "@tursodatabase/sync-common": "npm:^0.2.0-pre.1"
    "@vitest/browser": "npm:^3.2.4"
    playwright: "npm:^1.55.0"
    typescript: "npm:^5.9.2"
    vitest: "npm:^3.2.4"
  languageName: unknown
  linkType: soft

"@tursodatabase/sync-common@npm:^0.2.0-pre.1, @tursodatabase/sync-common@workspace:sync/packages/common":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/sync-common@workspace:sync/packages/common"
  dependencies:
    typescript: "npm:^5.9.2"
  languageName: unknown
  linkType: soft

"@tursodatabase/sync@workspace:sync/packages/native":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/sync@workspace:sync/packages/native"
  dependencies:
    "@napi-rs/cli": "npm:^3.1.5"
    "@tursodatabase/database-common": "npm:^0.2.0-pre.1"
    "@tursodatabase/sync-common": "npm:^0.2.0-pre.1"
    "@types/node": "npm:^24.3.1"
    typescript: "npm:^5.9.2"
    vitest: "npm:^3.2.4"
@@ -14,6 +14,7 @@ crate-type = ["cdylib"]
[features]
# must be enabled when building with `cargo build`, maturin enables this automatically
extension-module = ["pyo3/extension-module"]
tracing_release = ["turso_core/tracing_release"]

[dependencies]
anyhow = "1.0"

@@ -14,6 +14,7 @@ default = ["experimental_indexes"]
conn_raw_api = ["turso_core/conn_raw_api"]
experimental_indexes = []
antithesis = ["turso_core/antithesis"]
tracing_release = ["turso_core/tracing_release"]

[dependencies]
turso_core = { workspace = true, features = ["io_uring"] }

@@ -51,6 +51,7 @@ mimalloc = { workspace = true }
[features]
default = ["io_uring"]
io_uring = ["turso_core/io_uring"]
tracing_release = ["turso_core/tracing_release"]

[build-dependencies]
syntect = { git = "https://github.com/trishume/syntect.git", rev = "64644ffe064457265cbcee12a0c1baf9485ba6ee" }
152
cli/app.rs
@@ -19,6 +19,7 @@ use comfy_table::{Attribute, Cell, CellAlignment, ContentArrangement, Row, Table
use rustyline::{error::ReadlineError, history::DefaultHistory, Editor};
use std::{
    io::{self, BufRead as _, IsTerminal, Write},
    mem::{forget, ManuallyDrop},
    path::PathBuf,
    sync::{
        atomic::{AtomicUsize, Ordering},
@@ -82,7 +83,7 @@ pub struct Limbo {
    writer: Option<Box<dyn Write>>,
    conn: Arc<turso_core::Connection>,
    pub interrupt_count: Arc<AtomicUsize>,
    input_buff: String,
    input_buff: ManuallyDrop<String>,
    opts: Settings,
    pub rl: Option<Editor<LimboHelper, DefaultHistory>>,
    config: Option<Config>,
@@ -149,7 +150,7 @@ macro_rules! row_step_result_query {

impl Limbo {
    pub fn new() -> anyhow::Result<(Self, WorkerGuard)> {
        let opts = Opts::parse();
        let mut opts = Opts::parse();
        let guard = Self::init_tracing(&opts)?;

        let db_file = opts
@@ -202,7 +203,8 @@ impl Limbo {
            })
            .expect("Error setting Ctrl-C handler");
        }
        let sql = opts.sql.clone();
        let sql = opts.sql.take();
        let has_sql = sql.is_some();
        let quiet = opts.quiet;
        let config = Config::for_output_mode(opts.output_mode);
        let mut app = Self {
@@ -211,12 +213,12 @@ impl Limbo {
            writer: Some(get_writer(&opts.output)),
            conn,
            interrupt_count,
            input_buff: String::new(),
            input_buff: ManuallyDrop::new(sql.unwrap_or_default()),
            opts: Settings::from(opts),
            rl: None,
            config: Some(config),
        };
        app.first_run(sql, quiet)?;
        app.first_run(has_sql, quiet)?;
        Ok((app, guard))
    }

@@ -235,14 +237,14 @@ impl Limbo {
        self
    }

    fn first_run(&mut self, sql: Option<String>, quiet: bool) -> Result<(), LimboError> {
    fn first_run(&mut self, has_sql: bool, quiet: bool) -> Result<(), LimboError> {
        // Skip startup messages and SQL execution in MCP mode
        if self.is_mcp_mode() {
            return Ok(());
        }

        if let Some(sql) = sql {
            self.handle_first_input(&sql)?;
        if has_sql {
            self.handle_first_input()?;
        }
        if !quiet {
            self.writeln_fmt(format_args!("Turso v{}", env!("CARGO_PKG_VERSION")))?;
@@ -255,12 +257,8 @@ impl Limbo {
        Ok(())
    }

    fn handle_first_input(&mut self, cmd: &str) -> Result<(), LimboError> {
        if cmd.trim().starts_with('.') {
            self.handle_dot_command(&cmd[1..]);
        } else {
            self.run_query(cmd);
        }
    fn handle_first_input(&mut self) -> Result<(), LimboError> {
        self.consume(true);
        self.close_conn()?;
        std::process::exit(0);
    }
@@ -439,12 +437,6 @@ impl Limbo {
            .unwrap()
            .write_all(self.opts.null_value.as_bytes())
    }

    fn buffer_input(&mut self, line: &str) {
        self.input_buff.push_str(line);
        self.input_buff.push(' ');
    }

    fn run_query(&mut self, input: &str) {
        let echo = self.opts.echo;
        if echo {
@@ -506,8 +498,6 @@ impl Limbo {
                let _ = self.writeln(output);
            }
        }

        self.reset_input();
    }

    fn print_query_performance_stats(&mut self, start: Instant, stats: Option<&QueryStatistics>) {
@@ -553,35 +543,74 @@ impl Limbo {
        }
    }

    fn reset_line(&mut self, _line: &str) -> rustyline::Result<()> {
    fn reset_line(&mut self) {
        // Entry is auto added to history
        // self.rl.add_history_entry(line.to_owned())?;
        self.interrupt_count.store(0, Ordering::Release);
        Ok(())
    }

    pub fn handle_input_line(&mut self, line: &str) -> anyhow::Result<()> {
        if self.input_buff.is_empty() {
            if line.is_empty() {
                return Ok(());
            }
            if let Some(command) = line.strip_prefix('.') {
                self.handle_dot_command(command);
                let _ = self.reset_line(line);
                return Ok(());
            }
    // consume will consume `input_buff`
    pub fn consume(&mut self, flush: bool) {
        if self.input_buff.trim().is_empty() {
            return;
        }

        self.reset_line(line)?;
        if line.ends_with(';') {
            self.buffer_input(line);
            let buff = self.input_buff.clone();
            self.run_query(buff.as_str());
        } else {
            self.buffer_input(format!("{line}\n").as_str());
            self.set_multiline_prompt();
        self.reset_line();

        // we are taking ownership of input_buff here
        // its always safe because we split the string in two parts
        fn take_usable_part(app: &mut Limbo) -> (String, usize) {
            let ptr = app.input_buff.as_mut_ptr();
            let (len, cap) = (app.input_buff.len(), app.input_buff.capacity());
            app.input_buff =
                ManuallyDrop::new(unsafe { String::from_raw_parts(ptr.add(len), 0, cap - len) });
            (unsafe { String::from_raw_parts(ptr, len, len) }, unsafe {
                ptr.add(len).addr()
            })
        }

        fn concat_usable_part(app: &mut Limbo, mut part: String, old_address: usize) {
            let ptr = app.input_buff.as_mut_ptr();
            let (len, cap) = (app.input_buff.len(), app.input_buff.capacity());

            // if the address is not the same, meaning the string has been reallocated
            // so we just drop the part we took earlier
            if ptr.addr() != old_address || !app.input_buff.is_empty() {
                return;
            }

            let head_ptr = part.as_mut_ptr();
            let (head_len, head_cap) = (part.len(), part.capacity());
            forget(part); // move this part into `input_buff`
            app.input_buff = ManuallyDrop::new(unsafe {
                String::from_raw_parts(head_ptr, head_len + len, head_cap + cap)
            });
        }

        let value = self.input_buff.trim();
        match (value.starts_with('.'), value.ends_with(';')) {
            (true, _) => {
                let (owned_value, old_address) = take_usable_part(self);
                self.handle_dot_command(owned_value.trim().strip_prefix('.').unwrap());
                concat_usable_part(self, owned_value, old_address);
                self.reset_input();
            }
            (false, true) => {
                let (owned_value, old_address) = take_usable_part(self);
                self.run_query(owned_value.trim());
                concat_usable_part(self, owned_value, old_address);
                self.reset_input();
            }
            (false, false) if flush => {
                let (owned_value, old_address) = take_usable_part(self);
                self.run_query(owned_value.trim());
                concat_usable_part(self, owned_value, old_address);
                self.reset_input();
            }
            (false, false) => {
                self.set_multiline_prompt();
            }
        }
        Ok(())
    }

    pub fn handle_dot_command(&mut self, line: &str) {
@@ -1256,35 +1285,23 @@ impl Limbo {
        Ok(())
    }

    pub fn handle_remaining_input(&mut self) {
        if self.input_buff.is_empty() {
            return;
        }
    // readline will read inputs from rustyline or stdin
    // and write it to input_buff.
    pub fn readline(&mut self) -> Result<(), ReadlineError> {
        use std::fmt::Write;

        let buff = self.input_buff.clone();
        self.run_query(buff.as_str());
        self.reset_input();
    }

    pub fn readline(&mut self) -> Result<String, ReadlineError> {
        if let Some(rl) = &mut self.rl {
            Ok(rl.readline(&self.prompt)?)
            let result = rl.readline(&self.prompt)?;
            let _ = self.input_buff.write_str(result.as_str());
        } else {
            let mut input = String::new();
            let mut reader = std::io::stdin().lock();
            if reader.read_line(&mut input)? == 0 {
            if reader.read_line(&mut self.input_buff)? == 0 {
                return Err(ReadlineError::Eof);
            }
            // Remove trailing newline
            if input.ends_with('\n') {
                input.pop();
                if input.ends_with('\r') {
                    input.pop();
                }
            }

            Ok(input)
        }

        let _ = self.input_buff.write_char(' ');
        Ok(())
    }

    pub fn dump_database_from_conn<W: Write, P: ProgressSink>(
@@ -1579,6 +1596,9 @@ fn sql_quote_string(s: &str) -> String {
}
impl Drop for Limbo {
    fn drop(&mut self) {
        self.save_history()
        self.save_history();
        unsafe {
            ManuallyDrop::drop(&mut self.input_buff);
        }
    }
}

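
For context on the consume() hunk above: take_usable_part splits input_buff at its current length, handing back the filled head as an owned String with no spare capacity while input_buff temporarily owns only the tail capacity, and concat_usable_part re-joins the halves only when the buffer was neither reallocated nor repopulated in between. Below is a minimal safe sketch of the same ownership dance using std::mem::take instead of raw pointers; Repl and execute are illustrative names, not the CLI's API, and the real code avoids even the brief empty-buffer state shown here.

use std::mem;

struct Repl {
    input_buff: String,
}

impl Repl {
    fn execute(&mut self, stmt: &str) {
        // stand-in for run_query / handle_dot_command, which need
        // `&mut self` while the buffer's contents are being executed
        println!("executing: {stmt}");
    }

    fn consume(&mut self) {
        if self.input_buff.trim().is_empty() {
            return;
        }
        // take ownership of the buffer so `execute` can borrow self mutably
        let owned = mem::take(&mut self.input_buff);
        self.execute(owned.trim());
        // hand the allocation back if nothing repopulated the buffer,
        // mirroring concat_usable_part's "only re-join when input_buff
        // is still empty" check
        if self.input_buff.is_empty() {
            self.input_buff = owned;
            self.input_buff.clear();
        }
    }
}

fn main() {
    let mut repl = Repl { input_buff: "select 1;".to_string() };
    repl.consume();
    assert!(repl.input_buff.is_empty());
}
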
13
cli/main.rs
@@ -63,14 +63,8 @@ fn main() -> anyhow::Result<()> {
    }

    loop {
        let readline = app.readline();
        match readline {
            Ok(line) => match app.handle_input_line(line.trim()) {
                Ok(_) => {}
                Err(e) => {
                    eprintln!("{e}");
                }
            },
        match app.readline() {
            Ok(_) => app.consume(false),
            Err(ReadlineError::Interrupted) => {
                // At prompt, increment interrupt count
                if app.interrupt_count.fetch_add(1, Ordering::SeqCst) >= 1 {
@@ -83,7 +77,8 @@ fn main() -> anyhow::Result<()> {
                continue;
            }
            Err(ReadlineError::Eof) => {
                app.handle_remaining_input();
                // consume remaining input before exit
                app.consume(true);
                let _ = app.close_conn();
                break;
            }

@@ -16,6 +16,7 @@ path = "lib.rs"
[features]
default = ["fs", "uuid", "time", "json", "series"]
antithesis = ["dep:antithesis_sdk"]
tracing_release = ["tracing/release_max_level_info"]
conn_raw_api = []
fs = ["turso_ext/vfs"]
json = []
@@ -101,7 +102,6 @@ rand = "0.8.5" # Required for quickcheck
rand_chacha = "0.9.0"
env_logger = "0.11.6"
test-log = { version = "0.2.17", features = ["trace"] }
lru = "0.14.0"
sorted-vec = "0.8.6"
mimalloc = { version = "0.1.46", default-features = false }


@@ -124,6 +124,8 @@ pub enum CompletionError {
    Aborted,
    #[error("Decryption failed for page={page_idx}")]
    DecryptionError { page_idx: usize },
    #[error("I/O error: partial write")]
    ShortWrite,
}

#[macro_export]

@@ -64,24 +64,35 @@ fn exec_datetime(values: &[Register], output_type: DateTimeOutput) -> Value {
}

fn modify_dt(dt: &mut NaiveDateTime, mods: &[Register], output_type: DateTimeOutput) -> Value {
    let mut n_floor: i64 = 0;
    let mut subsec_requested = false;

    for modifier in mods {
        if let Value::Text(ref text_rc) = modifier.get_value() {
            // TODO: to prevent double conversion and properly support 'utc'/'localtime', we also
            // need to keep track of the current timezone and apply it to the modifier.
            match apply_modifier(dt, text_rc.as_str()) {
            let parsed = parse_modifier(text_rc.as_str());
            if !matches!(parsed, Ok(Modifier::Floor) | Ok(Modifier::Ceiling)) {
                n_floor = 0;
            }

            match apply_modifier(dt, text_rc.as_str(), &mut n_floor) {
                Ok(true) => subsec_requested = true,
                Ok(false) => {}
                Err(_) => return Value::build_text(""),
            }

            if matches!(parsed, Ok(Modifier::Floor) | Ok(Modifier::Ceiling)) {
                n_floor = 0;
            }
        } else {
            return Value::build_text("");
        }
    }

    if is_leap_second(dt) || *dt > get_max_datetime_exclusive() {
        return Value::build_text("");
    }

    format_dt(*dt, output_type, subsec_requested)
}

@@ -97,7 +108,7 @@ fn format_dt(dt: NaiveDateTime, output_type: DateTimeOutput, subsec: bool) -> Va
            Value::from_text(t.as_str())
        }
        DateTimeOutput::DateTime => {
            let t = if subsec {
            let t = if subsec && dt.nanosecond() != 0 {
                dt.format("%Y-%m-%d %H:%M:%S%.3f").to_string()
            } else {
                dt.format("%Y-%m-%d %H:%M:%S").to_string()
@@ -136,9 +147,7 @@ fn strftime_format(dt: &NaiveDateTime, format_str: &str) -> String {
    }
}

// to prevent stripping the modifier string and comparing multiple times, this returns
// whether the modifier was a subsec modifier because it impacts the format string
fn apply_modifier(dt: &mut NaiveDateTime, modifier: &str) -> Result<bool> {
fn apply_modifier(dt: &mut NaiveDateTime, modifier: &str, n_floor: &mut i64) -> Result<bool> {
    let parsed_modifier = parse_modifier(modifier)?;

    match parsed_modifier {
@@ -150,10 +159,10 @@ fn apply_modifier(dt: &mut NaiveDateTime, modifier: &str) -> Result<bool> {
            // Convert months to years + leftover months
            let years = m / 12;
            let leftover = m % 12;
            add_years_and_months(dt, years, leftover)?;
            add_years_and_months(dt, years, leftover, n_floor)?;
        }
        Modifier::Years(y) => {
            add_years_and_months(dt, y, 0)?;
            add_years_and_months(dt, y, 0, n_floor)?;
        }
        Modifier::TimeOffset(offset) => *dt += offset,
        Modifier::DateOffset {
@@ -161,9 +170,7 @@ fn apply_modifier(dt: &mut NaiveDateTime, modifier: &str) -> Result<bool> {
            months,
            days,
        } => {
            *dt = dt
                .checked_add_months(chrono::Months::new((years * 12 + months) as u32))
                .ok_or_else(|| InvalidModifier("Invalid date offset".to_string()))?;
            add_years_and_months(dt, years, months, n_floor)?;
            *dt += TimeDelta::days(days as i64);
        }
        Modifier::DateTimeOffset {
@@ -172,12 +179,20 @@ fn apply_modifier(dt: &mut NaiveDateTime, modifier: &str) -> Result<bool> {
            days,
            seconds,
        } => {
            add_years_and_months(dt, years, months)?;
            add_years_and_months(dt, years, months, n_floor)?;
            *dt += chrono::Duration::days(days as i64);
            *dt += chrono::Duration::seconds(seconds.into());
        }
        Modifier::Ceiling => todo!(),
        Modifier::Floor => todo!(),
        Modifier::Floor => {
            if *n_floor <= 0 {
                return Ok(false);
            }

            *dt -= TimeDelta::days(*n_floor);
        }
        Modifier::Ceiling => {
            *n_floor = 0;
        }
        Modifier::StartOfMonth => {
            *dt = NaiveDate::from_ymd_opt(dt.year(), dt.month(), 1)
                .unwrap()
@@ -224,16 +239,22 @@ fn is_julian_day_value(value: f64) -> bool {
    (0.0..5373484.5).contains(&value)
}

fn add_years_and_months(dt: &mut NaiveDateTime, years: i32, months: i32) -> Result<()> {
    add_whole_years(dt, years)?;
    add_months_in_increments(dt, months)?;
fn add_years_and_months(
    dt: &mut NaiveDateTime,
    years: i32,
    months: i32,
    n_floor: &mut i64,
) -> Result<()> {
    add_whole_years(dt, years, n_floor)?;
    add_months_in_increments(dt, months, n_floor)?;
    Ok(())
}

fn add_whole_years(dt: &mut NaiveDateTime, years: i32) -> Result<()> {
fn add_whole_years(dt: &mut NaiveDateTime, years: i32, n_floor: &mut i64) -> Result<()> {
    if years == 0 {
        return Ok(());
    }

    let target_year = dt.year() + years;
    let (m, d, hh, mm, ss) = (dt.month(), dt.day(), dt.hour(), dt.minute(), dt.second());

@@ -257,16 +278,17 @@ fn add_whole_years(dt: &mut NaiveDateTime, years: i32) -> Result<()> {
        .ok_or_else(|| InvalidModifier("Invalid time format".to_string()))?;

        *dt = base_date + chrono::Duration::days(leftover as i64);
        *n_floor += leftover as i64;
    } else {
        // do we fall back here?
    }
    Ok(())
}

fn add_months_in_increments(dt: &mut NaiveDateTime, months: i32) -> Result<()> {
fn add_months_in_increments(dt: &mut NaiveDateTime, months: i32, n_floor: &mut i64) -> Result<()> {
    let step = if months >= 0 { 1 } else { -1 };
    for _ in 0..months.abs() {
        add_one_month(dt, step)?;
        add_one_month(dt, step, n_floor)?;
    }
    Ok(())
}
@@ -277,7 +299,7 @@ fn add_months_in_increments(dt: &mut NaiveDateTime, months: i32) -> Result<()> {
//
// the modifiers 'ceiling' and 'floor' will determine behavior, so we'll need to eagerly
// evaluate modifiers in the future to support those, and 'julianday'/'unixepoch'
fn add_one_month(dt: &mut NaiveDateTime, step: i32) -> Result<()> {
fn add_one_month(dt: &mut NaiveDateTime, step: i32, n_floor: &mut i64) -> Result<()> {
    let (y0, m0, d0) = (dt.year(), dt.month(), dt.day());
    let (hh, mm, ss) = (dt.hour(), dt.minute(), dt.second());

@@ -306,6 +328,7 @@ fn add_one_month(dt: &mut NaiveDateTime, step: i32) -> Result<()> {
        .ok_or_else(|| InvalidModifier("Invalid Auto format".to_string()))?;

        *dt = base_date + chrono::Duration::days(leftover as i64);
        *n_floor += leftover as i64;
    }
    Ok(())
}
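
A note on the n_floor bookkeeping threaded through the datetime hunks above: calendar arithmetic like '2023-01-31, +1 month' lands on a nonexistent Feb 31, so the implementation normalizes past the month boundary and records the spilled days in n_floor; a later 'floor' modifier subtracts them to clamp back to the last valid day, while 'ceiling' discards the record. A self-contained sketch of that overflow tracking on plain (year, month, day) tuples; the function names are illustrative, not the crate's API:

fn days_in_month(y: i32, m: u32) -> u32 {
    match m {
        1 | 3 | 5 | 7 | 8 | 10 | 12 => 31,
        4 | 6 | 9 | 11 => 30,
        // February, with the Gregorian leap-year rule
        _ => if (y % 4 == 0 && y % 100 != 0) || y % 400 == 0 { 29 } else { 28 },
    }
}

/// Add one month, SQLite-style: days that overflow the target month
/// spill into the next one, and the spill is recorded in `n_floor`
/// so a later 'floor' modifier can undo it.
fn add_one_month(date: (i32, u32, u32), n_floor: &mut i64) -> (i32, u32, u32) {
    let (y, m, d) = date;
    let (y1, m1) = if m == 12 { (y + 1, 1) } else { (y, m + 1) };
    let max_d = days_in_month(y1, m1);
    if d <= max_d {
        return (y1, m1, d);
    }
    let leftover = d - max_d; // e.g. Jan 31 + 1 month is 3 days past Feb 28
    *n_floor += leftover as i64;
    let (y2, m2) = if m1 == 12 { (y1 + 1, 1) } else { (y1, m1 + 1) };
    (y2, m2, leftover)
}

fn main() {
    let mut n_floor = 0i64;
    // 2023-01-31 + 1 month overflows February by 3 days -> 2023-03-03
    assert_eq!(add_one_month((2023, 1, 31), &mut n_floor), (2023, 3, 3));
    assert_eq!(n_floor, 3);
    // applying 'floor' now would subtract n_floor days: 2023-02-28
}
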
@@ -1166,6 +1189,12 @@ mod tests {
        assert_eq!(parse_modifier("WEEKDAY 6").unwrap(), Modifier::Weekday(6));
    }

    #[test]
    fn test_parse_ceiling_modifier() {
        assert_eq!(parse_modifier("ceiling").unwrap(), Modifier::Ceiling);
        assert_eq!(parse_modifier("CEILING").unwrap(), Modifier::Ceiling);
    }

    #[test]
    fn test_parse_other_modifiers() {
        assert_eq!(parse_modifier("unixepoch").unwrap(), Modifier::UnixEpoch);
@@ -1214,89 +1243,106 @@ mod tests {
    #[test]
    fn test_apply_modifier_days() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "5 days").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "5 days", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 20, 12, 30, 45));

        dt = setup_datetime();
        apply_modifier(&mut dt, "-3 days").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "-3 days", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 12, 12, 30, 45));
    }

    #[test]
    fn test_apply_modifier_hours() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "6 hours").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "6 hours", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 18, 30, 45));

        dt = setup_datetime();
        apply_modifier(&mut dt, "-2 hours").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "-2 hours", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 10, 30, 45));
    }

    #[test]
    fn test_apply_modifier_minutes() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "45 minutes").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "45 minutes", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 13, 15, 45));

        dt = setup_datetime();
        apply_modifier(&mut dt, "-15 minutes").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "-15 minutes", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 12, 15, 45));
    }

    #[test]
    fn test_apply_modifier_seconds() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "30 seconds").unwrap();

        let mut n_floor = 0;
        apply_modifier(&mut dt, "30 seconds", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 12, 31, 15));

        dt = setup_datetime();
        apply_modifier(&mut dt, "-20 seconds").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "-20 seconds", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 12, 30, 25));
    }

    #[test]
    fn test_apply_modifier_time_offset() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "+01:30").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "+01:30", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 14, 0, 45));

        dt = setup_datetime();
        apply_modifier(&mut dt, "-00:45").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "-00:45", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 11, 45, 45));
    }

    #[test]
    fn test_apply_modifier_date_time_offset() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "+0001-01-01 01:01").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "+0001-01-01 01:01", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2024, 7, 16, 13, 31, 45));

        dt = setup_datetime();
        apply_modifier(&mut dt, "-0001-01-01 01:01").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "-0001-01-01 01:01", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2022, 5, 14, 11, 29, 45));

        // Test with larger offsets
        dt = setup_datetime();
        apply_modifier(&mut dt, "+0002-03-04 05:06").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "+0002-03-04 05:06", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2025, 9, 19, 17, 36, 45));

        dt = setup_datetime();
        apply_modifier(&mut dt, "-0002-03-04 05:06").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "-0002-03-04 05:06", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2021, 3, 11, 7, 24, 45));
    }

    #[test]
    fn test_apply_modifier_start_of_year() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "start of year").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "start of year", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 1, 0, 0, 0));
    }

    #[test]
    fn test_apply_modifier_start_of_day() {
        let mut dt = setup_datetime();
        apply_modifier(&mut dt, "start of day").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "start of day", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 0, 0, 0));
    }

@@ -1475,7 +1521,8 @@ mod tests {
    fn test_already_on_weekday_no_change() {
        // 2023-01-01 is a Sunday => weekday 0
        let mut dt = create_datetime(2023, 1, 1, 12, 0, 0);
        apply_modifier(&mut dt, "weekday 0").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "weekday 0", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 1, 12, 0, 0));
        assert_eq!(weekday_sunday_based(&dt), 0);
    }
@@ -1485,14 +1532,16 @@ mod tests {
        // 2023-01-01 is a Sunday => weekday 0
        // "weekday 1" => next Monday => 2023-01-02
        let mut dt = create_datetime(2023, 1, 1, 12, 0, 0);
        apply_modifier(&mut dt, "weekday 1").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "weekday 1", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 2, 12, 0, 0));
        assert_eq!(weekday_sunday_based(&dt), 1);

        // 2023-01-03 is a Tuesday => weekday 2
        // "weekday 5" => next Friday => 2023-01-06
        let mut dt = create_datetime(2023, 1, 3, 12, 0, 0);
        apply_modifier(&mut dt, "weekday 5").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "weekday 5", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 6, 12, 0, 0));
        assert_eq!(weekday_sunday_based(&dt), 5);
    }
@@ -1502,12 +1551,13 @@ mod tests {
        // 2023-01-06 is a Friday => weekday 5
        // "weekday 0" => next Sunday => 2023-01-08
        let mut dt = create_datetime(2023, 1, 6, 12, 0, 0);
        apply_modifier(&mut dt, "weekday 0").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "weekday 0", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 8, 12, 0, 0));
        assert_eq!(weekday_sunday_based(&dt), 0);

        // Now confirm that being on Sunday (weekday 0) and asking for "weekday 0" stays put
        apply_modifier(&mut dt, "weekday 0").unwrap();
        apply_modifier(&mut dt, "weekday 0", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 8, 12, 0, 0));
        assert_eq!(weekday_sunday_based(&dt), 0);
    }
@@ -1517,7 +1567,8 @@ mod tests {
        // 2023-01-05 is a Thursday => weekday 4
        // Asking for weekday 4 => no change
        let mut dt = create_datetime(2023, 1, 5, 12, 0, 0);
        apply_modifier(&mut dt, "weekday 4").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "weekday 4", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 5, 12, 0, 0));
        assert_eq!(weekday_sunday_based(&dt), 4);
    }
@@ -1527,7 +1578,8 @@ mod tests {
        // 2023-01-06 is a Friday => weekday 5
        // Asking for weekday 5 => no change if already on Friday
        let mut dt = create_datetime(2023, 1, 6, 12, 0, 0);
        apply_modifier(&mut dt, "weekday 5").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "weekday 5", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 1, 6, 12, 0, 0));
        assert_eq!(weekday_sunday_based(&dt), 5);
    }
@@ -1549,7 +1601,8 @@ mod tests {
    #[test]
    fn test_apply_modifier_start_of_month() {
        let mut dt = create_datetime(2023, 6, 15, 12, 30, 45);
        apply_modifier(&mut dt, "start of month").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "start of month", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 1, 0, 0, 0));
    }

@@ -1558,15 +1611,48 @@ mod tests {
        let mut dt = create_datetime(2023, 6, 15, 12, 30, 45);
        let dt_with_nanos = dt.with_nanosecond(123_456_789).unwrap();
        dt = dt_with_nanos;
        apply_modifier(&mut dt, "subsec").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "subsec", &mut n_floor).unwrap();
        assert_eq!(dt, dt_with_nanos);
    }

    #[test]
    fn test_apply_modifier_floor_modifier_n_floor_gt_0() {
        let mut dt = create_datetime(2023, 6, 15, 12, 30, 45);
        let mut n_floor = 3;

        apply_modifier(&mut dt, "floor", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 12, 12, 30, 45));
    }

    #[test]
    fn test_apply_modifier_floor_modifier_n_floor_le_0() {
        let mut dt = create_datetime(2023, 6, 15, 12, 30, 45);
        let mut n_floor = 0;

        apply_modifier(&mut dt, "floor", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 15, 12, 30, 45));

        n_floor = 2;
        apply_modifier(&mut dt, "floor", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 13, 12, 30, 45));
    }

    #[test]
    fn test_apply_modifier_ceiling_modifier_sets_n_floor_to_zero() {
        let mut dt = create_datetime(2023, 6, 15, 12, 30, 45);
        let mut n_floor = 5;

        apply_modifier(&mut dt, "ceiling", &mut n_floor).unwrap();
        assert_eq!(n_floor, 0);
    }

    #[test]
    fn test_apply_modifier_start_of_month_basic() {
        // Basic check: from mid-month to the 1st at 00:00:00.
        let mut dt = create_datetime(2023, 6, 15, 12, 30, 45);
        apply_modifier(&mut dt, "start of month").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "start of month", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 1, 0, 0, 0));
    }

@@ -1574,7 +1660,8 @@ mod tests {
    fn test_apply_modifier_start_of_month_already_at_first() {
        // If we're already at the start of the month, no change.
        let mut dt = create_datetime(2023, 6, 1, 0, 0, 0);
        apply_modifier(&mut dt, "start of month").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "start of month", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 6, 1, 0, 0, 0));
    }

@@ -1582,7 +1669,8 @@ mod tests {
    fn test_apply_modifier_start_of_month_edge_case() {
        // edge case: month boundary. 2023-07-31 -> start of July.
        let mut dt = create_datetime(2023, 7, 31, 23, 59, 59);
        apply_modifier(&mut dt, "start of month").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "start of month", &mut n_floor).unwrap();
        assert_eq!(dt, create_datetime(2023, 7, 1, 0, 0, 0));
    }

@@ -1591,7 +1679,8 @@ mod tests {
        let mut dt = create_datetime(2023, 6, 15, 12, 30, 45);
        let dt_with_nanos = dt.with_nanosecond(123_456_789).unwrap();
        dt = dt_with_nanos;
        apply_modifier(&mut dt, "subsec").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "subsec", &mut n_floor).unwrap();
        assert_eq!(dt, dt_with_nanos);
    }

@@ -1600,7 +1689,8 @@ mod tests {
        let mut dt = create_datetime(2025, 1, 2, 4, 12, 21)
            .with_nanosecond(891_000_000) // 891 milliseconds
            .unwrap();
        apply_modifier(&mut dt, "subsec").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "subsec", &mut n_floor).unwrap();

        let formatted = dt.format("%Y-%m-%d %H:%M:%S%.3f").to_string();
        assert_eq!(formatted, "2025-01-02 04:12:21.891");
@@ -1609,7 +1699,8 @@ mod tests {
    #[test]
    fn test_apply_modifier_subsec_no_fractional_seconds() {
        let mut dt = create_datetime(2025, 1, 2, 4, 12, 21);
        apply_modifier(&mut dt, "subsec").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "subsec", &mut n_floor).unwrap();

        let formatted = dt.format("%Y-%m-%d %H:%M:%S%.3f").to_string();
        assert_eq!(formatted, "2025-01-02 04:12:21.000");
@@ -1620,7 +1711,8 @@ mod tests {
        let mut dt = create_datetime(2025, 1, 2, 4, 12, 21)
            .with_nanosecond(891_123_456)
            .unwrap();
        apply_modifier(&mut dt, "subsec").unwrap();
        let mut n_floor = 0;
        apply_modifier(&mut dt, "subsec", &mut n_floor).unwrap();

        let formatted = dt.format("%Y-%m-%d %H:%M:%S%.3f").to_string();
        assert_eq!(formatted, "2025-01-02 04:12:21.891");

@@ -37,7 +37,7 @@ impl IO for GenericIO {
    }

    #[instrument(err, skip_all, level = Level::TRACE)]
    fn run_once(&self) -> Result<()> {
    fn step(&self) -> Result<()> {
        Ok(())
    }
}

@@ -3,7 +3,7 @@
use super::{common, Completion, CompletionInner, File, OpenFlags, IO};
use crate::io::clock::{Clock, Instant};
use crate::storage::wal::CKPT_BATCH_PAGES;
use crate::{turso_assert, LimboError, Result};
use crate::{turso_assert, CompletionError, LimboError, Result};
use parking_lot::Mutex;
use rustix::fs::{self, FlockOperation, OFlags};
use std::ptr::NonNull;
@@ -48,6 +48,9 @@ const ARENA_COUNT: usize = 2;
/// writing a commit frame.
const BARRIER_USER_DATA: u64 = 1;

/// user_data tag for cancellation operations
const CANCEL_TAG: u64 = 1;

pub struct UringIO {
    inner: Arc<Mutex<InnerUringIO>>,
}
@@ -317,6 +320,18 @@ impl WrappedIOUring {
        self.ring.submit().expect("submiting when full");
    }

    fn submit_cancel_urgent(&mut self, entry: &io_uring::squeue::Entry) -> Result<()> {
        let pushed = unsafe { self.ring.submission().push(entry).is_ok() };
        if pushed {
            self.pending_ops += 1;
            return Ok(());
        }
        // place cancel op at the front, if overflowed
        self.overflow.push_front(entry.clone());
        self.ring.submit()?;
        Ok(())
    }

    /// Flush overflow entries to submission queue when possible
    fn flush_overflow(&mut self) -> Result<()> {
        while !self.overflow.is_empty() {
@@ -468,10 +483,18 @@ impl WrappedIOUring {
        }

        let written = result;

        // guard against no-progress loop
        if written == 0 && state.remaining() > 0 {
            state.free_last_iov(&mut self.iov_pool);
            completion_from_key(user_data).error(CompletionError::ShortWrite);
            return;
        }
        state.advance(written as u64);

        match state.remaining() {
            0 => {
                tracing::info!(
                tracing::debug!(
                    "writev operation completed: wrote {} bytes",
                    state.total_written
                );
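
The zero-progress guard added to the writev completion path above matters because a 0-byte completion with bytes still remaining would otherwise be resubmitted indefinitely; surfacing CompletionError::ShortWrite breaks the loop. A minimal sketch of the same invariant on a plain retrying write loop (illustrative only, not the io_uring code path):

use std::io::{self, Write};

/// Write all of `buf`, failing fast instead of spinning when the
/// writer reports zero progress -- the invariant the writev
/// completion handler enforces with CompletionError::ShortWrite.
fn write_all_checked<W: Write>(w: &mut W, mut buf: &[u8]) -> io::Result<()> {
    while !buf.is_empty() {
        let written = w.write(buf)?;
        if written == 0 {
            // no progress: report an error rather than looping forever
            return Err(io::Error::new(
                io::ErrorKind::WriteZero,
                "partial write made no progress",
            ));
        }
        buf = &buf[written..]; // advance, mirroring state.advance(written)
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let mut sink = Vec::new();
    write_all_checked(&mut sink, b"commit frame")?;
    assert_eq!(sink, b"commit frame");
    Ok(())
}
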
@@ -546,8 +569,63 @@ impl IO for UringIO {
        Ok(())
    }

    fn run_once(&self) -> Result<()> {
        trace!("run_once()");
    /// Drain calls `run_once` in a loop until the ring is empty.
    /// To prevent mutex churn of checking if ring.empty() on each iteration, we violate DRY
    fn drain(&self) -> Result<()> {
        trace!("drain()");
        let mut inner = self.inner.lock();
        let ring = &mut inner.ring;
        loop {
            ring.flush_overflow()?;
            if ring.empty() {
                return Ok(());
            }
            ring.submit_and_wait()?;
            'inner: loop {
                let Some(cqe) = ring.ring.completion().next() else {
                    break 'inner;
                };
                ring.pending_ops -= 1;
                let user_data = cqe.user_data();
                if user_data == CANCEL_TAG {
                    // ignore if this is a cancellation CQE
                    continue 'inner;
                }
                let result = cqe.result();
                turso_assert!(
                    user_data != 0,
                    "user_data must not be zero, we dont submit linked timeouts that would cause this"
                );
                if let Some(state) = ring.writev_states.remove(&user_data) {
                    // if we have ongoing writev state, handle it separately and don't call completion
                    ring.handle_writev_completion(state, user_data, result);
                    continue 'inner;
                }
                if result < 0 {
                    let errno = -result;
                    let err = std::io::Error::from_raw_os_error(errno);
                    completion_from_key(user_data).error(err.into());
                } else {
                    completion_from_key(user_data).complete(result)
                }
            }
        }
    }

    fn cancel(&self, completions: &[Completion]) -> Result<()> {
        let mut inner = self.inner.lock();
        for c in completions {
            c.abort();
            let e = io_uring::opcode::AsyncCancel::new(get_key(c.clone()))
                .build()
                .user_data(CANCEL_TAG);
            inner.ring.submit_cancel_urgent(&e)?;
        }
        Ok(())
    }

    fn step(&self) -> Result<()> {
        trace!("step()");
        let mut inner = self.inner.lock();
        let ring = &mut inner.ring;
        ring.flush_overflow()?;
@@ -561,11 +639,15 @@ impl IO for UringIO {
        };
        ring.pending_ops -= 1;
        let user_data = cqe.user_data();
        if user_data == CANCEL_TAG {
            // ignore if this is a cancellation CQE
            continue;
        }
        let result = cqe.result();
        turso_assert!(
            user_data != 0,
            "user_data must not be zero, we dont submit linked timeouts or cancelations that would cause this"
        );
            user_data != 0,
            "user_data must not be zero, we dont submit linked timeouts that would cause this"
        );
        if let Some(state) = ring.writev_states.remove(&user_data) {
            // if we have ongoing writev state, handle it separately and don't call completion
            ring.handle_writev_completion(state, user_data, result);
@@ -579,7 +661,13 @@ impl IO for UringIO {
            }
            continue;
        }
        completion_from_key(user_data).complete(result)
        if result < 0 {
            let errno = -result;
            let err = std::io::Error::from_raw_os_error(errno);
            completion_from_key(user_data).error(err.into());
        } else {
            completion_from_key(user_data).complete(result)
        }
    }
}

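
The CANCEL_TAG convention above reserves a single user_data value so that cancellation completions can be told apart from real I/O completions while draining the ring. A toy sketch of that dispatch, with completion-queue entries modeled as plain (user_data, result) pairs; names and error codes are illustrative:

const CANCEL_TAG: u64 = 1;

#[derive(Debug, PartialEq)]
enum Outcome {
    Ignored,       // cancellation CQE, no completion to run
    Error(i32),    // negative result carries -errno
    Complete(i32), // non-negative result, e.g. bytes transferred
}

/// Classify one completion-queue entry the way drain()/step() do:
/// cancel CQEs are skipped, negative results become errors.
fn dispatch(user_data: u64, result: i32) -> Outcome {
    if user_data == CANCEL_TAG {
        return Outcome::Ignored;
    }
    assert!(user_data != 0, "user_data must not be zero");
    if result < 0 {
        Outcome::Error(-result)
    } else {
        Outcome::Complete(result)
    }
}

fn main() {
    assert_eq!(dispatch(CANCEL_TAG, 0), Outcome::Ignored);
    assert_eq!(dispatch(42, 4096), Outcome::Complete(4096));
    assert_eq!(dispatch(42, -5), Outcome::Error(5)); // 5 is EIO on Linux
}
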
@@ -50,9 +50,6 @@ pub trait File: Send + Sync {
            })
        };
        if let Err(e) = self.pwrite(pos, buf.clone(), child_c) {
            // best-effort: mark as abort so caller won't wait forever
            // TODO: when we have `pwrite` and other I/O methods return CompletionError
            // instead of LimboError, store the error inside
            c.abort();
            return Err(e);
        }
@@ -87,13 +84,22 @@ pub trait IO: Clock + Send + Sync {
    // remove_file is used in the sync-engine
    fn remove_file(&self, path: &str) -> Result<()>;

    fn run_once(&self) -> Result<()> {
    fn step(&self) -> Result<()> {
        Ok(())
    }

    fn cancel(&self, c: &[Completion]) -> Result<()> {
        c.iter().for_each(|c| c.abort());
        Ok(())
    }

    fn drain(&self) -> Result<()> {
        Ok(())
    }

    fn wait_for_completion(&self, c: Completion) -> Result<()> {
        while !c.finished() {
            self.run_once()?
            self.step()?
        }
        if let Some(Some(err)) = c.inner.result.get().copied() {
            return Err(err.into());

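
Alongside the run_once to step rename, the IO trait above gains default cancel and drain methods, and wait_for_completion becomes a step-polling loop. A condensed sketch of that polling contract on a standalone stand-in trait (the real trait also requires Clock, file management, and the crate's Result and Completion types):

/// Stand-in for the polling half of the IO trait after the rename:
/// step() makes one unit of progress, drain() runs until idle.
trait PollIo {
    fn step(&self) -> Result<(), String> {
        Ok(())
    }

    fn drain(&self) -> Result<(), String> {
        Ok(())
    }

    /// Default busy-wait, as in wait_for_completion: keep calling
    /// step() until the operation reports finished.
    fn wait_until(&self, finished: &dyn Fn() -> bool) -> Result<(), String> {
        while !finished() {
            self.step()?;
        }
        Ok(())
    }
}

struct NoopIo;
impl PollIo for NoopIo {}

fn main() {
    let io = NoopIo;
    let polls = std::cell::Cell::new(0);
    io.wait_until(&|| {
        polls.set(polls.get() + 1);
        polls.get() >= 3 // pretend the op completes after three polls
    })
    .unwrap();
    assert_eq!(polls.get(), 3);
}
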
@@ -120,7 +120,7 @@ impl IO for UnixIO {
    }

    #[instrument(err, skip_all, level = Level::TRACE)]
    fn run_once(&self) -> Result<()> {
    fn step(&self) -> Result<()> {
        Ok(())
    }
}

@@ -45,7 +45,7 @@ impl IO for VfsMod {
        Ok(())
    }

    fn run_once(&self) -> Result<()> {
    fn step(&self) -> Result<()> {
        if self.ctx.is_null() {
            return Err(LimboError::ExtensionError("VFS is null".to_string()));
        }

@@ -37,7 +37,7 @@ impl IO for WindowsIO {
    }

    #[instrument(err, skip_all, level = Level::TRACE)]
    fn run_once(&self) -> Result<()> {
    fn step(&self) -> Result<()> {
        Ok(())
    }
}

@@ -1,4 +1,5 @@
use crate::json::error::{Error as PError, Result as PResult};
use crate::json::Conv;
use crate::{bail_parse_error, LimboError, Result};
use std::{
    borrow::Cow,
@@ -742,7 +743,15 @@ impl JsonbHeader {
        Self(ElementType::OBJECT, 0)
    }

    fn from_slice(cursor: usize, slice: &[u8]) -> Result<(Self, usize)> {
    pub(super) fn element_type(&self) -> ElementType {
        self.0
    }

    pub(super) fn payload_size(&self) -> PayloadSize {
        self.1
    }

    pub(super) fn from_slice(cursor: usize, slice: &[u8]) -> Result<(Self, usize)> {
        match slice.get(cursor) {
            Some(header_byte) => {
                // Extract first 4 bits (values 0-15)
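
Background for the header parsing in from_slice above, assuming the layout documented for SQLite's JSONB format (which this encoding follows): the low 4 bits of the first byte are the element type and the high 4 bits are a payload-size code, where 0 through 11 are the size itself and 12, 13, or 14 mean the size follows in 1, 2, or 4 big-endian bytes. A hedged decoding sketch, independent of the crate's types; the crate's PayloadSize handling may differ in detail:

/// Decode a JSONB element header per the assumed SQLite JSONB layout.
/// Returns (element_type, payload_size, header_len), or None on truncation.
fn decode_header(data: &[u8]) -> Option<(u8, u64, usize)> {
    let first = *data.first()?;
    let element_type = first & 0x0f;
    match first >> 4 {
        n @ 0..=11 => Some((element_type, n as u64, 1)),
        12 => Some((element_type, *data.get(1)? as u64, 2)),
        13 => {
            let b = data.get(1..3)?;
            Some((element_type, u16::from_be_bytes([b[0], b[1]]) as u64, 3))
        }
        14 => {
            let b = data.get(1..5)?;
            Some((element_type, u32::from_be_bytes([b[0], b[1], b[2], b[3]]) as u64, 5))
        }
        _ => None, // 15 is reserved
    }
}

fn main() {
    // 0x37: high nibble 3 = payload size 3, low nibble 7 = TEXT
    assert_eq!(decode_header(&[0x37, b'a', b'b', b'c']), Some((7, 3, 1)));
}
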
@@ -921,6 +930,96 @@ impl Jsonb {
        }
    }

    pub fn is_valid(&self) -> bool {
        self.validate_element(0, self.data.len(), 0).is_ok()
    }

    fn validate_element(&self, start: usize, end: usize, depth: usize) -> Result<()> {
        if depth > MAX_JSON_DEPTH {
            bail_parse_error!("Too deep");
        }

        if start >= end {
            bail_parse_error!("Empty element");
        }

        let (header, header_offset) = self.read_header(start)?;
        let payload_start = start + header_offset;
        let payload_size = header.payload_size();
        let payload_end = payload_start + payload_size;

        if payload_end != end {
            bail_parse_error!("Size mismatch");
        }

        match header.element_type() {
            ElementType::NULL | ElementType::TRUE | ElementType::FALSE => {
                if payload_size == 0 {
                    Ok(())
                } else {
                    bail_parse_error!("Invalid payload for primitive")
                }
            }
            ElementType::INT | ElementType::INT5 | ElementType::FLOAT | ElementType::FLOAT5 => {
                if payload_size > 0 {
                    Ok(())
                } else {
                    bail_parse_error!("Empty number payload")
                }
            }
            ElementType::TEXT | ElementType::TEXTJ | ElementType::TEXT5 | ElementType::TEXTRAW => {
                let payload = &self.data[payload_start..payload_end];
                std::str::from_utf8(payload).map_err(|_| {
                    LimboError::ParseError("Invalid UTF-8 in text payload".to_string())
                })?;
                Ok(())
            }
            ElementType::ARRAY => {
                let mut pos = payload_start;
                while pos < payload_end {
                    if pos >= self.data.len() {
                        bail_parse_error!("Array element out of bounds");
                    }
                    let (elem_header, elem_header_size) = self.read_header(pos)?;
                    let elem_end = pos + elem_header_size + elem_header.payload_size();
                    if elem_end > payload_end {
                        bail_parse_error!("Array element exceeds bounds");
                    }
                    self.validate_element(pos, elem_end, depth + 1)?;
                    pos = elem_end;
                }
                Ok(())
            }
            ElementType::OBJECT => {
                let mut pos = payload_start;
                let mut count = 0;
                while pos < payload_end {
                    if pos >= self.data.len() {
                        bail_parse_error!("Object element out of bounds");
                    }
                    let (elem_header, elem_header_size) = self.read_header(pos)?;
                    if count % 2 == 0 && !elem_header.element_type().is_valid_key() {
                        bail_parse_error!("Object key must be text");
                    }

                    let elem_end = pos + elem_header_size + elem_header.payload_size();
                    if elem_end > payload_end {
                        bail_parse_error!("Object element exceeds bounds");
                    }
                    self.validate_element(pos, elem_end, depth + 1)?;
                    pos = elem_end;
                    count += 1;
                }

                if count % 2 != 0 {
                    bail_parse_error!("Object must have even number of elements");
                }
                Ok(())
            }
            _ => bail_parse_error!("Invalid element type"),
        }
    }

    #[expect(clippy::inherent_to_string)]
    pub fn to_string(&self) -> String {
        let mut result = String::with_capacity(self.data.len() * 2);
@@ -2170,6 +2269,18 @@ impl Jsonb {
        Ok(result)
    }

    pub fn from_str_with_mode(input: &str, mode: Conv) -> PResult<Self> {
        // Parse directly as JSON if it's already JSON subtype or strict mode is on
        if matches!(mode, Conv::ToString) {
            let mut str = input.replace('"', "\\\"");
            str.insert(0, '"');
            str.push('"');
            Jsonb::from_str(&str)
        } else {
            Jsonb::from_str(input)
        }
    }

    pub fn from_raw_data(data: &[u8]) -> Self {
        Self::new(data.len(), Some(data))
    }

@@ -118,24 +118,20 @@ pub fn convert_dbtype_to_jsonb(val: &Value, strict: Conv) -> crate::Result<Jsonb
    )
}

fn parse_as_json_text(slice: &[u8]) -> crate::Result<Jsonb> {
    let str = std::str::from_utf8(slice)
        .map_err(|_| LimboError::ParseError("malformed JSON".to_string()))?;
    Jsonb::from_str_with_mode(str, Conv::Strict).map_err(Into::into)
}

pub fn convert_ref_dbtype_to_jsonb(val: &RefValue, strict: Conv) -> crate::Result<Jsonb> {
    match val {
        RefValue::Text(text) => {
            let res = if text.subtype == TextSubtype::Json || matches!(strict, Conv::Strict) {
                // Parse directly as JSON if it's already JSON subtype or strict mode is on
                let json = if matches!(strict, Conv::ToString) {
                    let mut str = text.as_str().replace('"', "\\\"");
                    str.insert(0, '"');
                    str.push('"');
                    Jsonb::from_str(&str)
                } else {
                    Jsonb::from_str(text.as_str())
                };
                json
                Jsonb::from_str_with_mode(text.as_str(), strict)
            } else {
                // Handle as a string literal otherwise
                let mut str = text.as_str().replace('"', "\\\"");

                // Quote the string to make it a JSON string
                str.insert(0, '"');
                str.push('"');
@@ -144,7 +140,40 @@ pub fn convert_ref_dbtype_to_jsonb(val: &RefValue, strict: Conv) -> crate::Resul
            res.map_err(|_| LimboError::ParseError("malformed JSON".to_string()))
        }
        RefValue::Blob(blob) => {
            let json = Jsonb::from_raw_data(blob.to_slice());
            let bytes = blob.to_slice();
            // Valid JSON can start with these whitespace characters
            let index = bytes
                .iter()
                .position(|&b| !matches!(b, b' ' | b'\t' | b'\n' | b'\r'))
                .unwrap_or(bytes.len());
            let slice = &bytes[index..];
            let json = match slice {
                // branch with no overlapping initial byte
                [b'"', ..] | [b'-', ..] | [b'0'..=b'2', ..] => parse_as_json_text(slice)?,
                _ => match JsonbHeader::from_slice(0, slice) {
                    Ok((header, header_offset)) => {
                        let payload_size = header.payload_size();
                        let total_expected = header_offset + payload_size;

                        if total_expected != slice.len() {
                            parse_as_json_text(slice)?
                        } else {
                            let jsonb = Jsonb::from_raw_data(slice);
                            let is_valid_json = if payload_size <= 7 {
                                jsonb.is_valid()
                            } else {
                                jsonb.element_type().is_ok()
                            };
                            if is_valid_json {
                                jsonb
                            } else {
                                parse_as_json_text(slice)?
                            }
                        }
                    }
                    Err(_) => parse_as_json_text(slice)?,
                },
            };
            json.element_type()?;
            Ok(json)
        }

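
The blob branch above has to decide whether incoming bytes are a binary JSONB payload or JSON text, and it keys off initial bytes that cannot begin a valid JSONB header: a quote, a minus sign, or the digits 0 through 2 mean JSON text; everything else proceeds to header validation. A standalone sketch of that first-byte classification, mirroring the match arms above (BlobKind and classify are illustrative names):

#[derive(Debug, PartialEq)]
enum BlobKind {
    JsonText,   // unambiguously textual JSON
    MaybeJsonb, // settled by decoding and validating a header
}

/// First-byte classification used before attempting a JSONB header parse.
fn classify(bytes: &[u8]) -> BlobKind {
    // valid JSON may begin with insignificant whitespace
    let first = bytes
        .iter()
        .copied()
        .find(|&b| !matches!(b, b' ' | b'\t' | b'\n' | b'\r'));
    match first {
        // bytes that can never start a well-formed JSONB header
        Some(b'"') | Some(b'-') | Some(b'0'..=b'2') => BlobKind::JsonText,
        _ => BlobKind::MaybeJsonb,
    }
}

fn main() {
    assert_eq!(classify(b"  \"hello\""), BlobKind::JsonText);
    assert_eq!(classify(b"-12.5"), BlobKind::JsonText);
    assert_eq!(classify(&[0x37, b'a', b'b', b'c']), BlobKind::MaybeJsonb);
}
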
@@ -1106,6 +1106,7 @@ impl Connection {
                "The supplied SQL string contains no statements".to_string(),
            ));
        }
        self.maybe_update_schema()?;
        let sql = sql.as_ref();
        tracing::trace!("Preparing and executing batch: {}", sql);
        let mut parser = Parser::new(sql.as_bytes());
@@ -1143,6 +1144,7 @@ impl Connection {
            return Err(LimboError::InternalError("Connection closed".to_string()));
        }
        let sql = sql.as_ref();
        self.maybe_update_schema()?;
        tracing::trace!("Querying: {}", sql);
        let mut parser = Parser::new(sql.as_bytes());
        let cmd = parser.next_cmd()?;
@@ -1216,6 +1218,7 @@ impl Connection {
            return Err(LimboError::InternalError("Connection closed".to_string()));
        }
        let sql = sql.as_ref();
        self.maybe_update_schema()?;
        let mut parser = Parser::new(sql.as_bytes());
        while let Some(cmd) = parser.next_cmd()? {
            let syms = self.syms.borrow();
@@ -1224,7 +1227,6 @@ impl Connection {
            let input = str::from_utf8(&sql.as_bytes()[..byte_offset_end])
                .unwrap()
                .trim();
            self.maybe_update_schema()?;
            match cmd {
                Cmd::Explain(stmt) => {
                    let program = translate::translate(
@@ -2212,7 +2214,7 @@ impl Statement {
    }

    pub fn run_once(&self) -> Result<()> {
        let res = self.pager.io.run_once();
        let res = self.pager.io.step();
        if self.program.connection.is_nested_stmt.get() {
            return res;
        }

@@ -1309,7 +1309,7 @@ fn test_concurrent_writes() {
            }
        }
    }
    db.get_db().io.run_once().unwrap();
    db.get_db().io.step().unwrap();

    if all_finished {
        break;

@@ -46,6 +46,44 @@ pub enum Numeric {
}

impl Numeric {
    pub fn from_value_strict(value: &Value) -> Numeric {
        match value {
            Value::Null | Value::Blob(_) => Self::Null,
            Value::Integer(v) => Self::Integer(*v),
            Value::Float(v) => match NonNan::new(*v) {
                Some(v) => Self::Float(v),
                None => Self::Null,
            },
            Value::Text(text) => {
                let s = text.as_str();

                match str_to_f64(s) {
                    None
                    | Some(StrToF64::FractionalPrefix(_))
                    | Some(StrToF64::DecimalPrefix(_)) => Self::Null,
                    Some(StrToF64::Fractional(value)) => Self::Float(value),
                    Some(StrToF64::Decimal(real)) => {
                        let integer = str_to_i64(s).unwrap_or(0);

                        if real == integer as f64 {
                            Self::Integer(integer)
                        } else {
                            Self::Float(real)
                        }
                    }
                }
            }
        }
    }

    pub fn try_into_f64(&self) -> Option<f64> {
        match self {
            Numeric::Null => None,
            Numeric::Integer(v) => Some(*v as _),
            Numeric::Float(v) => Some((*v).into()),
        }
    }

    pub fn try_into_bool(&self) -> Option<bool> {
        match self {
            Numeric::Null => None,
@@ -82,8 +120,10 @@ impl<T: AsRef<str>> From<T> for Numeric {

        match str_to_f64(text) {
            None => Self::Integer(0),
            Some(StrToF64::Fractional(value)) => Self::Float(value),
            Some(StrToF64::Decimal(real)) => {
            Some(StrToF64::Fractional(value) | StrToF64::FractionalPrefix(value)) => {
                Self::Float(value)
            }
            Some(StrToF64::Decimal(real) | StrToF64::DecimalPrefix(real)) => {
                let integer = str_to_i64(text).unwrap_or(0);

                if real == integer as f64 {
@@ -460,9 +500,23 @@ pub fn str_to_i64(input: impl AsRef<str>) -> Option<i64> {
    )
}

#[derive(Debug)]
pub enum StrToF64 {
    Fractional(NonNan),
    Decimal(NonNan),
    FractionalPrefix(NonNan),
    DecimalPrefix(NonNan),
}

impl From<StrToF64> for f64 {
    fn from(value: StrToF64) -> Self {
        match value {
            StrToF64::Fractional(non_nan) => non_nan.into(),
            StrToF64::Decimal(non_nan) => non_nan.into(),
            StrToF64::FractionalPrefix(non_nan) => non_nan.into(),
            StrToF64::DecimalPrefix(non_nan) => non_nan.into(),
        }
    }
}

pub fn str_to_f64(input: impl AsRef<str>) -> Option<StrToF64> {
@@ -480,10 +534,6 @@ pub fn str_to_f64(input: impl AsRef<str>) -> Option<StrToF64> {
    let mut had_digits = false;
    let mut is_fractional = false;

    if matches!(input.peek(), Some('e' | 'E')) {
        return None;
    }

    let mut significant: u64 = 0;

    // Copy as many significant digits as we can
@@ -509,12 +559,12 @@ pub fn str_to_f64(input: impl AsRef<str>) -> Option<StrToF64> {
    }

    if input.next_if(|ch| matches!(ch, '.')).is_some() {
        if matches!(input.peek(), Some('e' | 'E')) {
            return None;
        if had_digits {
            is_fractional = true;
        }

        if had_digits || input.peek().is_some_and(char::is_ascii_digit) {
            is_fractional = true
        if input.peek().is_some_and(char::is_ascii_digit) {
            is_fractional = true;
        }

        while let Some(digit) = input.peek().and_then(|ch| ch.to_digit(10)) {
@@ -527,27 +577,32 @@ pub fn str_to_f64(input: impl AsRef<str>) -> Option<StrToF64> {
        }
    };

    if input.next_if(|ch| matches!(ch, 'e' | 'E')).is_some() {
    let mut valid_exponent = true;

    if (had_digits || is_fractional) && input.next_if(|ch| matches!(ch, 'e' | 'E')).is_some() {
        let sign = match input.next_if(|ch| matches!(ch, '-' | '+')) {
            Some('-') => -1,
            _ => 1,
        };

        if input.peek().is_some_and(char::is_ascii_digit) {
            is_fractional = true
        }
            is_fractional = true;
            let mut e = 0;

        let e = input.map_while(|ch| ch.to_digit(10)).fold(0, |acc, digit| {
            if acc < 1000 {
                acc * 10 + digit as i32
            } else {
                1000
            while let Some(ch) = input.next_if(char::is_ascii_digit) {
                e = (e * 10 + ch.to_digit(10).unwrap() as i32).min(1000);
            }
        });

        exponent += sign * e;
            exponent += sign * e;
        } else {
            valid_exponent = false;
        }
    };

    if !(had_digits || is_fractional) {
        return None;
    }

    while exponent.is_positive() && significant < MAX_EXACT / 10 {
        significant *= 10;
        exponent -= 1;
@@ -591,6 +646,14 @@ pub fn str_to_f64(input: impl AsRef<str>) -> Option<StrToF64> {
    let result = NonNan::new(f64::from(result) * sign)
        .unwrap_or_else(|| NonNan::new(sign * f64::INFINITY).unwrap());

    if !valid_exponent || input.count() > 0 {
        if is_fractional {
            return Some(StrToF64::FractionalPrefix(result));
        } else {
            return Some(StrToF64::DecimalPrefix(result));
        }
    }

    Some(if is_fractional {
        StrToF64::Fractional(result)
    } else {

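
On the new Prefix variants above: str_to_f64 now distinguishes a string that is entirely a number (Fractional/Decimal) from one that merely begins with a number (FractionalPrefix/DecimalPrefix), so strict conversion can reject trailing junk while lossy text coercion still uses the prefix value. A sketch of the intended classification on a simplified mirror of the enum; this is assumed behavior inferred from the match arms above, not the crate's parser:

/// Simplified mirror of StrToF64 for illustration.
#[derive(Debug, PartialEq)]
enum Classified {
    Fractional(f64),       // whole string, has '.' or exponent: "1.5"
    Decimal(f64),          // whole string, integer-looking: "42"
    FractionalPrefix(f64), // numeric prefix plus junk: "1.5abc"
    DecimalPrefix(f64),    // integer prefix plus junk: "42abc"
}

fn classify(s: &str) -> Option<Classified> {
    // split off the longest leading run of number-ish characters
    let num_end = s
        .find(|c: char| !(c.is_ascii_digit() || matches!(c, '+' | '-' | '.' | 'e' | 'E')))
        .unwrap_or(s.len());
    let (num, rest) = s.split_at(num_end);
    let value: f64 = num.parse().ok()?;
    let fractional = num.contains(|c: char| matches!(c, '.' | 'e' | 'E'));
    Some(match (fractional, rest.is_empty()) {
        (true, true) => Classified::Fractional(value),
        (false, true) => Classified::Decimal(value),
        (true, false) => Classified::FractionalPrefix(value),
        (false, false) => Classified::DecimalPrefix(value),
    })
}

fn main() {
    assert_eq!(classify("1.5"), Some(Classified::Fractional(1.5)));
    assert_eq!(classify("42"), Some(Classified::Decimal(42.0)));
    assert_eq!(classify("1.5abc"), Some(Classified::FractionalPrefix(1.5)));
    assert_eq!(classify("42abc"), Some(Classified::DecimalPrefix(42.0)));
}
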
373
core/schema.rs
373
core/schema.rs
@@ -19,11 +19,13 @@ use crate::translate::plan::SelectPlan;
|
||||
use crate::util::{
|
||||
module_args_from_sql, module_name_from_sql, type_from_name, IOExt, UnparsedFromSqlIndex,
|
||||
};
|
||||
use crate::{
|
||||
contains_ignore_ascii_case, eq_ignore_ascii_case, match_ignore_ascii_case, LimboError,
|
||||
MvCursor, Pager, RefValue, SymbolTable, VirtualTable,
|
||||
};
|
||||
use crate::{util::normalize_ident, Result};
|
||||
use crate::{LimboError, MvCursor, Pager, RefValue, SymbolTable, VirtualTable};
|
||||
use core::fmt;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::{BTreeSet, HashMap, HashSet};
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
@@ -314,184 +316,44 @@ impl Schema {
};

let mut record_cursor = cursor.record_cursor.borrow_mut();
// sqlite schema table has 5 columns: type, name, tbl_name, rootpage, sql
let ty_value = record_cursor.get_value(&row, 0)?;
let RefValue::Text(ty) = ty_value else {
return Err(LimboError::ConversionError("Expected text value".into()));
};
match ty.as_str() {
"table" => {
let root_page_value = record_cursor.get_value(&row, 3)?;
let RefValue::Integer(root_page) = root_page_value else {
return Err(LimboError::ConversionError("Expected integer value".into()));
};
let sql_value = record_cursor.get_value(&row, 4)?;
let RefValue::Text(sql_text) = sql_value else {
return Err(LimboError::ConversionError("Expected text value".into()));
};
let sql = sql_text.as_str();
let create_virtual = "create virtual";
if root_page == 0
&& sql[0..create_virtual.len()].eq_ignore_ascii_case(create_virtual)
{
let name_value = record_cursor.get_value(&row, 1)?;
let RefValue::Text(name_text) = name_value else {
return Err(LimboError::ConversionError("Expected text value".into()));
};
let name = name_text.as_str();

// a virtual table is found in the sqlite_schema, but it's no
// longer in the in-memory schema. We need to recreate it if
// the module is loaded in the symbol table.
let vtab = if let Some(vtab) = syms.vtabs.get(name) {
Arc::new((**vtab).clone())
} else {
let mod_name = module_name_from_sql(sql)?;
let vtab_rc = crate::VirtualTable::table(
Some(name),
mod_name,
module_args_from_sql(sql)?,
syms,
)?;
Arc::new((*vtab_rc).clone())
};
self.add_virtual_table(vtab);
continue;
}

let table = BTreeTable::from_sql(sql, root_page as usize)?;

// Check if this is a DBSP state table
if table.name.starts_with(DBSP_TABLE_PREFIX) {
// Extract the view name from _dbsp_state_<viewname>
let view_name = table
.name
.strip_prefix(DBSP_TABLE_PREFIX)
.unwrap()
.to_string();
dbsp_state_roots.insert(view_name, root_page as usize);
}

self.add_btree_table(Arc::new(table));
}
"index" => {
let root_page_value = record_cursor.get_value(&row, 3)?;
let RefValue::Integer(root_page) = root_page_value else {
return Err(LimboError::ConversionError("Expected integer value".into()));
};
match record_cursor.get_value(&row, 4) {
Ok(RefValue::Text(sql_text)) => {
let table_name_value = record_cursor.get_value(&row, 2)?;
let RefValue::Text(table_name_text) = table_name_value else {
return Err(LimboError::ConversionError(
"Expected text value".into(),
));
};

from_sql_indexes.push(UnparsedFromSqlIndex {
table_name: table_name_text.as_str().to_string(),
root_page: root_page as usize,
sql: sql_text.as_str().to_string(),
});
}
_ => {
let index_name_value = record_cursor.get_value(&row, 1)?;
let RefValue::Text(index_name_text) = index_name_value else {
return Err(LimboError::ConversionError(
"Expected text value".into(),
));
};

let table_name_value = record_cursor.get_value(&row, 2)?;
let RefValue::Text(table_name_text) = table_name_value else {
return Err(LimboError::ConversionError(
"Expected text value".into(),
));
};

match automatic_indices.entry(table_name_text.as_str().to_string()) {
Entry::Vacant(e) => {
e.insert(vec![(
index_name_text.as_str().to_string(),
root_page as usize,
)]);
}
Entry::Occupied(mut e) => {
e.get_mut().push((
index_name_text.as_str().to_string(),
root_page as usize,
));
}
}
}
}
}
"view" => {
let name_value = record_cursor.get_value(&row, 1)?;
let RefValue::Text(name_text) = name_value else {
return Err(LimboError::ConversionError("Expected text value".into()));
};
let name = name_text.as_str();

// Get the root page (column 3) to determine if this is a materialized view
// Regular views have rootpage = 0, materialized views have rootpage != 0
let root_page_value = record_cursor.get_value(&row, 3)?;
let RefValue::Integer(root_page_int) = root_page_value else {
return Err(LimboError::ConversionError("Expected integer value".into()));
};
let root_page = root_page_int as usize;

let sql_value = record_cursor.get_value(&row, 4)?;
let RefValue::Text(sql_text) = sql_value else {
return Err(LimboError::ConversionError("Expected text value".into()));
};
let sql = sql_text.as_str();

// Parse the SQL to determine if it's a regular or materialized view
let mut parser = Parser::new(sql.as_bytes());
if let Ok(Some(Cmd::Stmt(stmt))) = parser.next_cmd() {
match stmt {
Stmt::CreateMaterializedView { .. } => {
// Store materialized view info for later creation
// We'll create the actual IncrementalView in a later pass
// when we have both the main root page and DBSP state root
let view_name = name.to_string();
materialized_view_info
.insert(view_name, (sql.to_string(), root_page));
}
Stmt::CreateView {
view_name: _,
columns: column_names,
select,
..
} => {
// Extract actual columns from the SELECT statement
let view_columns = crate::util::extract_view_columns(&select, self);

// If column names were provided in CREATE VIEW (col1, col2, ...),
// use them to rename the columns
let mut final_columns = view_columns;
for (i, indexed_col) in column_names.iter().enumerate() {
if let Some(col) = final_columns.get_mut(i) {
col.name = Some(indexed_col.col_name.to_string());
}
}

// Create regular view
let view = View {
name: name.to_string(),
sql: sql.to_string(),
select_stmt: select,
columns: final_columns,
};
self.add_view(view);
}
_ => {}
}
}
}

_ => {}
let ty = ty.as_str();
let RefValue::Text(name) = record_cursor.get_value(&row, 1)? else {
return Err(LimboError::ConversionError("Expected text value".into()));
};
let name = name.as_str();
let table_name_value = record_cursor.get_value(&row, 2)?;
let RefValue::Text(table_name) = table_name_value else {
return Err(LimboError::ConversionError("Expected text value".into()));
};
let table_name = table_name.as_str();
let root_page_value = record_cursor.get_value(&row, 3)?;
let RefValue::Integer(root_page) = root_page_value else {
return Err(LimboError::ConversionError("Expected integer value".into()));
};
let sql_value = record_cursor.get_value(&row, 4)?;
let sql_textref = match sql_value {
RefValue::Text(sql) => Some(sql),
_ => None,
};
let sql = sql_textref.as_ref().map(|s| s.as_str());

self.handle_schema_row(
ty,
name,
table_name,
root_page,
sql,
syms,
&mut from_sql_indexes,
&mut automatic_indices,
&mut dbsp_state_roots,
&mut materialized_view_info,
)?;
drop(record_cursor);
drop(row);

@@ -500,6 +362,21 @@ impl Schema {

pager.end_read_tx()?;

self.populate_indices(from_sql_indexes, automatic_indices)?;

self.populate_materialized_views(materialized_view_info, dbsp_state_roots)?;

Ok(())
}

/// Populate indices parsed from the schema.
/// from_sql_indexes: indices explicitly created with CREATE INDEX
/// automatic_indices: indices created automatically for primary key and unique constraints
pub fn populate_indices(
&mut self,
from_sql_indexes: Vec<UnparsedFromSqlIndex>,
automatic_indices: std::collections::HashMap<String, Vec<(String, usize)>>,
) -> Result<()> {
for unparsed_sql_from_index in from_sql_indexes {
if !self.indexes_enabled() {
self.table_set_has_index(&unparsed_sql_from_index.table_name);
@@ -530,8 +407,15 @@ impl Schema {
}
}
}
Ok(())
}

// Third pass: Create materialized views now that we have both root pages
/// Populate materialized views parsed from the schema.
pub fn populate_materialized_views(
&mut self,
materialized_view_info: std::collections::HashMap<String, (String, usize)>,
dbsp_state_roots: std::collections::HashMap<String, usize>,
) -> Result<()> {
for (view_name, (sql, main_root)) in materialized_view_info {
// Look up the DBSP state root for this view - must exist for materialized views
let dbsp_state_root = dbsp_state_roots.get(&view_name).ok_or_else(|| {
@@ -563,6 +447,143 @@ impl Schema {
self.add_materialized_view_dependency(&table_name, &view_name);
}
}
Ok(())
}

#[allow(clippy::too_many_arguments)]
pub fn handle_schema_row(
&mut self,
ty: &str,
name: &str,
table_name: &str,
root_page: i64,
maybe_sql: Option<&str>,
syms: &SymbolTable,
from_sql_indexes: &mut Vec<UnparsedFromSqlIndex>,
automatic_indices: &mut std::collections::HashMap<String, Vec<(String, usize)>>,
dbsp_state_roots: &mut std::collections::HashMap<String, usize>,
materialized_view_info: &mut std::collections::HashMap<String, (String, usize)>,
) -> Result<()> {
match ty {
"table" => {
let sql = maybe_sql.expect("sql should be present for table");
let sql_bytes = sql.as_bytes();
if root_page == 0 && contains_ignore_ascii_case!(sql_bytes, b"create virtual") {
// a virtual table is found in the sqlite_schema, but it's no
// longer in the in-memory schema. We need to recreate it if
// the module is loaded in the symbol table.
let vtab = if let Some(vtab) = syms.vtabs.get(name) {
vtab.clone()
} else {
let mod_name = module_name_from_sql(sql)?;
crate::VirtualTable::table(
Some(name),
mod_name,
module_args_from_sql(sql)?,
syms,
)?
};
self.add_virtual_table(vtab);
} else {
let table = BTreeTable::from_sql(sql, root_page as usize)?;

// Check if this is a DBSP state table
if table.name.starts_with(DBSP_TABLE_PREFIX) {
// Extract the view name from __turso_internal_dbsp_state_<viewname>
let view_name = table
.name
.strip_prefix(DBSP_TABLE_PREFIX)
.unwrap()
.to_string();
dbsp_state_roots.insert(view_name, root_page as usize);
}

self.add_btree_table(Arc::new(table));
}
}
"index" => {
match maybe_sql {
Some(sql) => {
from_sql_indexes.push(UnparsedFromSqlIndex {
table_name: table_name.to_string(),
root_page: root_page as usize,
sql: sql.to_string(),
});
}
None => {
// Automatic index on primary key and/or unique constraint, e.g.
// table|foo|foo|2|CREATE TABLE foo (a text PRIMARY KEY, b)
// index|sqlite_autoindex_foo_1|foo|3|
let index_name = name.to_string();
let table_name = table_name.to_string();
match automatic_indices.entry(table_name) {
std::collections::hash_map::Entry::Vacant(e) => {
e.insert(vec![(index_name, root_page as usize)]);
}
std::collections::hash_map::Entry::Occupied(mut e) => {
e.get_mut().push((index_name, root_page as usize));
}
}
}
}
}
"view" => {
use crate::schema::View;
use turso_parser::ast::{Cmd, Stmt};
use turso_parser::parser::Parser;

let sql = maybe_sql.expect("sql should be present for view");
let view_name = name.to_string();

// Parse the SQL to determine if it's a regular or materialized view
let mut parser = Parser::new(sql.as_bytes());
if let Ok(Some(Cmd::Stmt(stmt))) = parser.next_cmd() {
match stmt {
Stmt::CreateMaterializedView { .. } => {
// Store materialized view info for later creation
// We'll handle reuse logic and create the actual IncrementalView
// in a later pass when we have both the main root page and DBSP state root
materialized_view_info
.insert(view_name.clone(), (sql.to_string(), root_page as usize));

// Mark the existing view for potential reuse
if self.incremental_views.contains_key(&view_name) {
// We'll check for reuse in the third pass
}
}
Stmt::CreateView {
view_name: _,
columns: column_names,
select,
..
} => {
// Extract actual columns from the SELECT statement
let view_columns = crate::util::extract_view_columns(&select, self);

// If column names were provided in CREATE VIEW (col1, col2, ...),
// use them to rename the columns
let mut final_columns = view_columns;
for (i, indexed_col) in column_names.iter().enumerate() {
if let Some(col) = final_columns.get_mut(i) {
col.name = Some(indexed_col.col_name.to_string());
}
}

// Create regular view
let view = View {
name: name.to_string(),
sql: sql.to_string(),
select_stmt: select,
columns: final_columns,
};
self.add_view(view);
}
_ => {}
}
}
}
_ => {}
};

Ok(())
}

@@ -481,7 +481,7 @@ pub struct BTreeCursor {
/// Page id of the root page used to go back up fast.
root_page: usize,
/// Rowid and record are stored before being consumed.
has_record: Cell<bool>,
pub has_record: Cell<bool>,
null_flag: bool,
/// Index internal pages are consumed on the way up, so we store going upwards flag in case
/// we just moved to a parent page and the parent page is an internal index page which requires
@@ -543,6 +543,11 @@ pub struct BTreeCursor {
seek_end_state: SeekEndState,
/// State machine for [BTreeCursor::move_to]
move_to_state: MoveToState,
/// Whether the next call to [BTreeCursor::next()] should be a no-op.
/// This is currently only used after a delete operation causes a rebalancing.
/// Advancing is only skipped if the cursor is currently pointing to a valid record
/// when next() is called.
pub skip_advance: Cell<bool>,
}

/// We store the cell index and cell count for each page in the stack.
@@ -615,6 +620,7 @@ impl BTreeCursor {
count_state: CountState::Start,
seek_end_state: SeekEndState::Start,
move_to_state: MoveToState::Start,
skip_advance: Cell::new(false),
}
}

@@ -696,7 +702,7 @@ impl BTreeCursor {
/// Move the cursor to the previous record and return it.
/// Used in backwards iteration.
#[instrument(skip(self), level = Level::DEBUG, name = "prev")]
fn get_prev_record(&mut self) -> Result<IOResult<bool>> {
pub fn get_prev_record(&mut self) -> Result<IOResult<bool>> {
loop {
let (old_top_idx, page_type, is_index, is_leaf, cell_count) = {
let page = self.stack.top_ref();
@@ -1202,7 +1208,7 @@ impl BTreeCursor {
/// Move the cursor to the next record and return it.
/// Used in forwards iteration, which is the default.
#[instrument(skip(self), level = Level::DEBUG, name = "next")]
fn get_next_record(&mut self) -> Result<IOResult<bool>> {
pub fn get_next_record(&mut self) -> Result<IOResult<bool>> {
if let Some(mv_cursor) = &self.mv_cursor {
let mut mv_cursor = mv_cursor.borrow_mut();
mv_cursor.forward();
@@ -2633,20 +2639,22 @@ impl BTreeCursor {
let current_sibling = sibling_pointer;
let mut completions: Vec<Completion> = Vec::with_capacity(current_sibling + 1);
for i in (0..=current_sibling).rev() {
let (page, c) =
btree_read_page(&self.pager, pgno as usize).inspect_err(|_| {
for c in completions.iter() {
c.abort();
match btree_read_page(&self.pager, pgno as usize) {
Err(e) => {
tracing::error!("error reading page {}: {}", pgno, e);
self.pager.io.cancel(&completions)?;
self.pager.io.drain()?;
return Err(e);
}
Ok((page, c)) => {
// mark as dirty
self.pager.add_dirty(&page);
pages_to_balance[i].replace(page);
if let Some(c) = c {
completions.push(c);
}
})?;
{
// mark as dirty
self.pager.add_dirty(&page);
}
}
if let Some(c) = c {
completions.push(c);
}
pages_to_balance[i].replace(page);
if i == 0 {
break;
}
@@ -3370,13 +3378,15 @@ impl BTreeCursor {
"left pointer is the same as parent page id"
);
#[cfg(debug_assertions)]
pages_pointed_to.insert(left_pointer);
tracing::debug!(
"balance_non_root(insert_divider_cell, first_divider_cell={}, divider_cell={}, left_pointer={})",
balance_info.first_divider_cell,
sibling_page_idx,
left_pointer
);
{
pages_pointed_to.insert(left_pointer);
tracing::debug!(
"balance_non_root(insert_divider_cell, first_divider_cell={}, divider_cell={}, left_pointer={})",
balance_info.first_divider_cell,
sibling_page_idx,
left_pointer
);
}
turso_assert!(
left_pointer == page.get().id as u32,
"left pointer is not the same as page id"
@@ -4239,6 +4249,7 @@ impl BTreeCursor {
if self.valid_state == CursorValidState::Invalid {
return Ok(IOResult::Done(()));
}
self.skip_advance.set(false);
loop {
match self.rewind_state {
RewindState::Start => {
@@ -4278,6 +4289,23 @@ impl BTreeCursor {
if self.valid_state == CursorValidState::Invalid {
return Ok(IOResult::Done(false));
}
if self.skip_advance.get() {
// See DeleteState::RestoreContextAfterBalancing
self.skip_advance.set(false);
let mem_page = self.stack.top_ref();
let contents = mem_page.get_contents();
let cell_idx = self.stack.current_cell_index();
let cell_count = contents.cell_count();
let has_record = cell_idx >= 0 && cell_idx < cell_count as i32;
if has_record {
self.has_record.set(true);
// If we are positioned at a record, we stop here without advancing.
return Ok(IOResult::Done(true));
}
// But: if we aren't currently positioned at a record (for example, we are at the end of a page),
// we need to advance despite the skip_advance flag
// because the intent is to find the next record immediately after the one we just deleted.
}
loop {
match self.advance_state {
AdvanceState::Start => {
@@ -4294,7 +4322,7 @@ impl BTreeCursor {
}
}

fn invalidate_record(&mut self) {
pub fn invalidate_record(&mut self) {
self.get_immutable_record_or_create()
.as_mut()
.unwrap()
@@ -4353,12 +4381,13 @@ impl BTreeCursor {
}
}

#[instrument(skip(self), level = Level::DEBUG)]
#[instrument(skip(self, key), level = Level::DEBUG)]
pub fn seek(&mut self, key: SeekKey<'_>, op: SeekOp) -> Result<IOResult<SeekResult>> {
if let Some(mv_cursor) = &self.mv_cursor {
let mut mv_cursor = mv_cursor.borrow_mut();
return mv_cursor.seek(key, op);
}
self.skip_advance.set(false);
// Empty trace to capture the span information
tracing::trace!("");
// We need to clear the null flag for the table cursor before seeking,
@@ -4453,7 +4482,7 @@ impl BTreeCursor {
Ok(IOResult::Done(Some(record_ref)))
}

#[instrument(skip(self), level = Level::DEBUG)]
#[instrument(skip_all, level = Level::DEBUG)]
pub fn insert(&mut self, key: &BTreeKey) -> Result<IOResult<()>> {
tracing::debug!(valid_state = ?self.valid_state, cursor_state = ?self.state, is_write_in_progress = self.is_write_in_progress());
match &self.mv_cursor {
@@ -4545,7 +4574,7 @@ impl BTreeCursor {
};
CursorContext {
key: CursorContextKey::IndexKeyRowId(record),
seek_op: SeekOp::LT,
seek_op: SeekOp::GE { eq_only: true },
}
} else {
let Some(rowid) = return_if_io!(self.rowid()) else {
@@ -4553,7 +4582,7 @@ impl BTreeCursor {
};
CursorContext {
key: CursorContextKey::TableRowId(rowid),
seek_op: SeekOp::LT,
seek_op: SeekOp::GE { eq_only: true },
}
};

@@ -4826,6 +4855,12 @@ impl BTreeCursor {
}
DeleteState::RestoreContextAfterBalancing => {
return_if_io!(self.restore_context());

// We deleted key K, and performed a seek to: GE { eq_only: true } K.
// This means that the cursor is now pointing to the next key after K.
// We need to make the next call to BTreeCursor::next() a no-op so that we don't skip over
// a row when deleting rows in a loop.
self.skip_advance.set(true);
self.state = CursorState::None;
return Ok(IOResult::Done(()));
}
@@ -5485,6 +5520,13 @@ pub enum IntegrityCheckError {
references: Vec<u64>,
page_category: PageCategory,
},
#[error(
"Freelist count mismatch. actual_count={actual_count}, expected_count={expected_count}"
)]
FreelistCountMismatch {
actual_count: usize,
expected_count: usize,
},
}

#[derive(Debug, Clone, Copy, PartialEq)]
@@ -5495,6 +5537,12 @@ pub(crate) enum PageCategory {
FreePage,
}

#[derive(Clone)]
pub struct CheckFreelist {
pub expected_count: usize,
pub actual_count: usize,
}

#[derive(Clone)]
struct IntegrityCheckPageEntry {
page_idx: usize,
@@ -5507,6 +5555,7 @@ pub struct IntegrityCheckState {
first_leaf_level: Option<usize>,
page_reference: HashMap<u64, u64>,
page: Option<PageRef>,
pub freelist_count: CheckFreelist,
}

impl IntegrityCheckState {
@@ -5516,9 +5565,17 @@ impl IntegrityCheckState {
page_reference: HashMap::new(),
first_leaf_level: None,
page: None,
freelist_count: CheckFreelist {
expected_count: 0,
actual_count: 0,
},
}
}

pub fn set_expected_freelist_count(&mut self, count: usize) {
self.freelist_count.expected_count = count;
}

pub fn start(
&mut self,
page_idx: usize,
@@ -5552,10 +5609,7 @@ impl IntegrityCheckState {
) {
let page_id = entry.page_idx as u64;
let Some(previous) = self.page_reference.insert(page_id, referenced_by) else {
// do not traverse free pages as they have no meaningful structured content
if entry.page_category != PageCategory::FreePage {
self.page_stack.push(entry);
}
self.page_stack.push(entry);
return;
};
errors.push(IntegrityCheckError::PageReferencedMultipleTimes {
@@ -5614,6 +5668,7 @@ pub fn integrity_check(

let contents = page.get_contents();
if page_category == PageCategory::FreeListTrunk {
state.freelist_count.actual_count += 1;
let next_freelist_trunk_page = contents.read_u32_no_offset(0);
if next_freelist_trunk_page != 0 {
state.push_page(
@@ -5643,6 +5698,10 @@ pub fn integrity_check(
}
continue;
}
if page_category == PageCategory::FreePage {
state.freelist_count.actual_count += 1;
continue;
}
if page_category == PageCategory::Overflow {
let next_overflow_page = contents.read_u32_no_offset(0);
if next_overflow_page != 0 {
@@ -6120,10 +6179,13 @@ impl PageStack {
#[instrument(skip(self), level = Level::DEBUG, name = "pagestack::retreat")]
fn retreat(&mut self) {
let current = self.current();
tracing::trace!(
curr_cell_index = self.node_states[current].cell_idx,
node_states = ?self.node_states.iter().map(|state| state.cell_idx).collect::<Vec<_>>(),
);
#[cfg(debug_assertions)]
{
tracing::trace!(
curr_cell_index = self.node_states[current].cell_idx,
node_states = ?self.node_states.iter().map(|state| state.cell_idx).collect::<Vec<_>>(),
);
}
self.node_states[current].cell_idx -= 1;
}

@@ -7604,7 +7666,7 @@ mod tests {
let cursor = BTreeCursor::new_table(None, pager.clone(), page_idx, num_columns);
let (page, _c) = cursor.read_page(page_idx).unwrap();
while page.is_locked() {
pager.io.run_once().unwrap();
pager.io.step().unwrap();
}

// Pin page in order to not drop it in between
@@ -7624,7 +7686,7 @@ mod tests {
}) => {
let (child_page, _c) = cursor.read_page(left_child_page as usize).unwrap();
while child_page.is_locked() {
pager.io.run_once().unwrap();
pager.io.step().unwrap();
}
child_pages.push(child_page);
if left_child_page == page.get().id as u32 {
@@ -7683,7 +7745,7 @@ mod tests {
*p = new_page;
}
while p.is_locked() {
pager.io.run_once().unwrap();
pager.io.step().unwrap();
}
p.get_contents().page_type()
});
@@ -7694,7 +7756,7 @@ mod tests {
*page = new_page;
}
while page.is_locked() {
pager.io.run_once().unwrap();
pager.io.step().unwrap();
}
if page.get_contents().page_type() != child_type {
tracing::error!("child pages have different types");
@@ -7715,7 +7777,7 @@ mod tests {
let cursor = BTreeCursor::new_table(None, pager.clone(), page_idx, num_columns);
let (page, _c) = cursor.read_page(page_idx).unwrap();
while page.is_locked() {
pager.io.run_once().unwrap();
pager.io.step().unwrap();
}

// Pin page in order to not drop it in between loading of different pages. If not contents will be a dangling reference.
@@ -8709,7 +8771,7 @@ mod tests {
.unwrap(),
);

pager.io.run_once().unwrap();
pager.io.step().unwrap();

let _ = run_until_done(|| pager.allocate_page1(), &pager);
for _ in 0..(database_size - 1) {
@@ -8761,11 +8823,11 @@ mod tests {
&IOContext::default(),
c,
)?;
pager.io.run_once()?;
pager.io.step()?;

let (page, _c) = cursor.read_page(current_page as usize)?;
while page.is_locked() {
cursor.pager.io.run_once()?;
cursor.pager.io.step()?;
}

{
@@ -8784,7 +8846,7 @@ mod tests {

current_page += 1;
}
pager.io.run_once()?;
pager.io.step()?;

// Create leaf cell pointing to start of overflow chain
let leaf_cell = BTreeCell::TableLeafCell(TableLeafCell {

@@ -1275,34 +1275,36 @@ impl Pager {
};
pages.push(page);
if pages.len() == IOV_MAX {
let c = wal
.borrow_mut()
.append_frames_vectored(
std::mem::replace(
&mut pages,
Vec::with_capacity(std::cmp::min(IOV_MAX, dirty_pages.len() - idx)),
),
page_sz,
commit_frame,
)
.inspect_err(|_| {
for c in completions.iter() {
c.abort();
}
})?;
completions.push(c);
match wal.borrow_mut().append_frames_vectored(
std::mem::replace(
&mut pages,
Vec::with_capacity(std::cmp::min(IOV_MAX, dirty_pages.len() - idx)),
),
page_sz,
commit_frame,
) {
Err(e) => {
self.io.cancel(&completions)?;
self.io.drain()?;
return Err(e);
}
Ok(c) => completions.push(c),
}
}
}
if !pages.is_empty() {
let c = wal
match wal
.borrow_mut()
.append_frames_vectored(pages, page_sz, commit_frame)
.inspect_err(|_| {
for c in completions.iter() {
c.abort();
}
})?;
completions.push(c);
{
Ok(c) => completions.push(c),
Err(e) => {
tracing::error!("cacheflush: error appending frames: {e}");
self.io.cancel(&completions)?;
self.io.drain()?;
return Err(e);
}
}
}
Ok(completions)
}
@@ -1379,9 +1381,7 @@ impl Pager {
match r {
Ok(c) => completions.push(c),
Err(e) => {
for c in &completions {
c.abort();
}
self.io.cancel(&completions)?;
return Err(e);
}
}
@@ -1984,7 +1984,7 @@ impl Pager {
// Freelist is not empty, so we can reuse the trunk itself as a new page
// and update the database's first freelist trunk page to the next trunk page.
header.freelist_trunk_page = next_trunk_page_id.into();
header.freelist_pages = (header.freelist_pages.get() + 1).into();
header.freelist_pages = (header.freelist_pages.get() - 1).into();
self.add_dirty(trunk_page);
// zero out the page
turso_assert!(

@@ -1086,9 +1086,9 @@ pub fn write_pages_vectored(
if runs_left.fetch_sub(1, Ordering::AcqRel) == 1 {
done.store(true, Ordering::Release);
}
for c in completions {
c.abort();
}
pager.io.cancel(&completions)?;
// cancel any submitted completions and drain the IO before returning an error
pager.io.drain()?;
return Err(e);
}
}

@@ -143,9 +143,24 @@ impl Display for DeletePlan {

writeln!(f, "{indent}DELETE FROM {table_name}")?;
}
Operation::Search { .. } => {
panic!("DELETE plans should not contain search operations");
}
Operation::Search(search) => match search {
Search::RowidEq { .. } | Search::Seek { index: None, .. } => {
writeln!(
f,
"{}SEARCH {} USING INTEGER PRIMARY KEY (rowid=?)",
indent, reference.identifier
)?;
}
Search::Seek {
index: Some(index), ..
} => {
writeln!(
f,
"{}SEARCH {} USING INDEX {}",
indent, reference.identifier, index.name
)?;
}
},
}
}
Ok(())

@@ -530,48 +530,57 @@ fn emit_delete_insns(
.schema
.indexes
.get(table_reference.table.get_name());
let index_refs_opt = indexes.map(|indexes| {
indexes
.iter()
.map(|index| {
(
index.clone(),
program.resolve_cursor_id(&CursorKey::index(
table_reference.internal_id,
index.clone(),
)),
)
})
.collect::<Vec<_>>()
});

if let Some(index_refs) = index_refs_opt {
for (index, index_cursor_id) in index_refs {
let num_regs = index.columns.len() + 1;
let start_reg = program.alloc_registers(num_regs);
// Emit columns that are part of the index
index
.columns
// Get the index that is being used to iterate the deletion loop, if there is one.
let iteration_index = table_reference.op.index();
// Get all indexes that are not the iteration index.
let other_indexes = indexes
.map(|indexes| {
indexes
.iter()
.enumerate()
.for_each(|(reg_offset, column_index)| {
program.emit_column_or_rowid(
main_table_cursor_id,
column_index.pos_in_table,
start_reg + reg_offset,
);
});
program.emit_insn(Insn::RowId {
cursor_id: main_table_cursor_id,
dest: start_reg + num_regs - 1,
.filter(|index| {
iteration_index
.as_ref()
.is_none_or(|it_idx| !Arc::ptr_eq(it_idx, index))
})
.map(|index| {
(
index.clone(),
program.resolve_cursor_id(&CursorKey::index(
table_reference.internal_id,
index.clone(),
)),
)
})
.collect::<Vec<_>>()
})
.unwrap_or_default();

for (index, index_cursor_id) in other_indexes {
let num_regs = index.columns.len() + 1;
let start_reg = program.alloc_registers(num_regs);
// Emit columns that are part of the index
index
.columns
.iter()
.enumerate()
.for_each(|(reg_offset, column_index)| {
program.emit_column_or_rowid(
main_table_cursor_id,
column_index.pos_in_table,
start_reg + reg_offset,
);
});
program.emit_insn(Insn::IdxDelete {
start_reg,
num_regs,
cursor_id: index_cursor_id,
raise_error_if_no_matching_entry: true,
});
}
program.emit_insn(Insn::RowId {
cursor_id: main_table_cursor_id,
dest: start_reg + num_regs - 1,
});
program.emit_insn(Insn::IdxDelete {
start_reg,
num_regs,
cursor_id: index_cursor_id,
raise_error_if_no_matching_entry: true,
});
}

// Emit update in the CDC table if necessary (before DELETE updated the table)
@@ -636,6 +645,17 @@ fn emit_delete_insns(
cursor_id: main_table_cursor_id,
table_name: table_reference.table.get_name().to_string(),
});

if let Some(index) = iteration_index {
let iteration_index_cursor = program.resolve_cursor_id(&CursorKey::index(
table_reference.internal_id,
index.clone(),
));
program.emit_insn(Insn::Delete {
cursor_id: iteration_index_cursor,
table_name: index.name.clone(),
});
}
}
if let Some(limit_ctx) = t_ctx.limit_ctx {
program.emit_insn(Insn::DecrJumpZero {

@@ -283,8 +283,45 @@ pub fn translate_condition_expr(
resolver: &Resolver,
) -> Result<()> {
match expr {
ast::Expr::Register(_) => {
crate::bail_parse_error!("Register in WHERE clause is currently unused. Consider removing Resolver::expr_to_reg_cache and using Expr::Register instead");
}
ast::Expr::Collate(_, _) => {
crate::bail_parse_error!("Collate in WHERE clause is not supported");
}
ast::Expr::DoublyQualified(_, _, _) | ast::Expr::Id(_) | ast::Expr::Qualified(_, _) => {
crate::bail_parse_error!(
"DoublyQualified/Id/Qualified should have been rewritten in optimizer"
);
}
ast::Expr::Exists(_) => {
crate::bail_parse_error!("EXISTS in WHERE clause is not supported");
}
ast::Expr::Subquery(_) => {
crate::bail_parse_error!("Subquery in WHERE clause is not supported");
}
ast::Expr::InSelect { .. } => {
crate::bail_parse_error!("IN (...subquery) in WHERE clause is not supported");
}
ast::Expr::InTable { .. } => {
crate::bail_parse_error!("Table expression in WHERE clause is not supported");
}
ast::Expr::FunctionCallStar { .. } => {
crate::bail_parse_error!("FunctionCallStar in WHERE clause is not supported");
}
ast::Expr::Raise(_, _) => {
crate::bail_parse_error!("RAISE in WHERE clause is not supported");
}
ast::Expr::Between { .. } => {
unreachable!("expression should have been rewritten in optimizer")
crate::bail_parse_error!("BETWEEN expression should have been rewritten in optimizer")
}
ast::Expr::Variable(_) => {
crate::bail_parse_error!(
"Variable as a direct predicate in WHERE clause is not supported"
);
}
ast::Expr::Name(_) => {
crate::bail_parse_error!("Name as a direct predicate in WHERE clause is not supported");
}
ast::Expr::Binary(lhs, ast::Operator::And, rhs) => {
// In a binary AND, never jump to the parent 'jump_target_when_true' label on the first condition, because
@@ -445,7 +482,6 @@ pub fn translate_condition_expr(
translate_expr(program, Some(referenced_tables), expr, expr_reg, resolver)?;
emit_cond_jump(program, condition_metadata, expr_reg);
}
other => todo!("expression {:?} not implemented", other),
}
Ok(())
}
@@ -641,8 +677,10 @@ pub fn translate_expr(
program.set_collation(Some((collation, true)));
Ok(target_register)
}
ast::Expr::DoublyQualified(_, _, _) => todo!(),
ast::Expr::Exists(_) => todo!(),
ast::Expr::DoublyQualified(_, _, _) => {
crate::bail_parse_error!("DoublyQualified should have been rewritten in optimizer")
}
ast::Expr::Exists(_) => crate::bail_parse_error!("EXISTS in WHERE clause is not supported"),
ast::Expr::FunctionCall {
name,
distinctness: _,
@@ -1768,7 +1806,9 @@ pub fn translate_expr(
Func::AlterTable(_) => unreachable!(),
}
}
ast::Expr::FunctionCallStar { .. } => todo!("{:?}", &expr),
ast::Expr::FunctionCallStar { .. } => {
crate::bail_parse_error!("FunctionCallStar in WHERE clause is not supported")
}
ast::Expr::Id(id) => {
// Treat double-quoted identifiers as string literals (SQLite compatibility)
program.emit_insn(Insn::String8 {
@@ -1979,8 +2019,12 @@ pub fn translate_expr(

Ok(result_reg)
}
ast::Expr::InSelect { .. } => todo!(),
ast::Expr::InTable { .. } => todo!(),
ast::Expr::InSelect { .. } => {
crate::bail_parse_error!("IN (...subquery) in WHERE clause is not supported")
}
ast::Expr::InTable { .. } => {
crate::bail_parse_error!("Table expression in WHERE clause is not supported")
}
ast::Expr::IsNull(expr) => {
let reg = program.alloc_register();
translate_expr(program, referenced_tables, expr, reg, resolver)?;
@@ -2016,7 +2060,9 @@ pub fn translate_expr(
Ok(target_register)
}
ast::Expr::Literal(lit) => emit_literal(program, lit, target_register),
ast::Expr::Name(_) => todo!(),
ast::Expr::Name(_) => {
crate::bail_parse_error!("ast::Expr::Name in WHERE clause is not supported")
}
ast::Expr::NotNull(expr) => {
let reg = program.alloc_register();
translate_expr(program, referenced_tables, expr, reg, resolver)?;
@@ -2051,15 +2097,19 @@ pub fn translate_expr(
} else {
// Parenthesized expressions with multiple arguments are reserved for special cases
// like `(a, b) IN ((1, 2), (3, 4))`.
todo!("TODO: parenthesized expression with multiple arguments not yet supported");
crate::bail_parse_error!(
"TODO: parenthesized expression with multiple arguments not yet supported"
);
}
Ok(target_register)
}
ast::Expr::Qualified(_, _) => {
unreachable!("Qualified should be resolved to a Column before translation")
}
ast::Expr::Raise(_, _) => todo!(),
ast::Expr::Subquery(_) => todo!(),
ast::Expr::Raise(_, _) => crate::bail_parse_error!("RAISE is not supported"),
ast::Expr::Subquery(_) => {
crate::bail_parse_error!("Subquery in WHERE clause is not supported")
}
ast::Expr::Unary(op, expr) => match (op, expr.as_ref()) {
(UnaryOperator::Positive, expr) => {
translate_expr(program, referenced_tables, expr, target_register, resolver)
@@ -2848,8 +2898,8 @@ fn translate_like_base(
},
});
}
ast::LikeOperator::Match => todo!(),
ast::LikeOperator::Regexp => todo!(),
ast::LikeOperator::Match => crate::bail_parse_error!("MATCH in LIKE is not supported"),
ast::LikeOperator::Regexp => crate::bail_parse_error!("REGEXP in LIKE is not supported"),
}

Ok(target_register)
@@ -3582,7 +3632,9 @@ pub fn emit_literal(
});
Ok(target_register)
}
ast::Literal::Keyword(_) => todo!(),
ast::Literal::Keyword(_) => {
crate::bail_parse_error!("Keyword in WHERE clause is not supported")
}
ast::Literal::Null => {
program.emit_insn(Insn::Null {
dest: target_register,

@@ -88,7 +88,7 @@ pub fn optimize_select_plan(plan: &mut SelectPlan, schema: &Schema) -> Result<()
Ok(())
}

fn optimize_delete_plan(plan: &mut DeletePlan, _schema: &Schema) -> Result<()> {
fn optimize_delete_plan(plan: &mut DeletePlan, schema: &Schema) -> Result<()> {
rewrite_exprs_delete(plan)?;
if let ConstantConditionEliminationResult::ImpossibleCondition =
eliminate_constant_conditions(&mut plan.where_clause)?
@@ -97,15 +97,14 @@ fn optimize_delete_plan(plan: &mut DeletePlan, _schema: &Schema) -> Result<()> {
return Ok(());
}

// FIXME: don't use indexes for delete right now because it's buggy. See for example:
// https://github.com/tursodatabase/turso/issues/1714
// let _ = optimize_table_access(
// &mut plan.table_references,
// &schema.indexes,
// &mut plan.where_clause,
// &mut plan.order_by,
// &mut None,
// )?;
let _ = optimize_table_access(
schema,
&mut plan.table_references,
&schema.indexes,
&mut plan.where_clause,
&mut plan.order_by,
&mut None,
)?;

Ok(())
}

@@ -996,6 +996,12 @@ fn parse_join(

let (outer, natural) = match join_operator {
ast::JoinOperator::TypedJoin(Some(join_type)) => {
if join_type.contains(JoinType::RIGHT) {
crate::bail_parse_error!("RIGHT JOIN is not supported");
}
if join_type.contains(JoinType::CROSS) {
crate::bail_parse_error!("CROSS JOIN is not supported");
}
let is_outer = join_type.contains(JoinType::OUTER);
let is_natural = join_type.contains(JoinType::NATURAL);
(is_outer, is_natural)

@@ -226,10 +226,7 @@ where
{
let s = String::deserialize(deserializer)?;
match crate::numeric::str_to_f64(s) {
Some(result) => Ok(match result {
crate::numeric::StrToF64::Fractional(non_nan) => non_nan.into(),
crate::numeric::StrToF64::Decimal(non_nan) => non_nan.into(),
}),
Some(result) => Ok(result.into()),
None => Err(serde::de::Error::custom("")),
}
}
@@ -667,7 +664,7 @@ impl PartialEq<Value> for Value {
match (self, other) {
(Self::Integer(int_left), Self::Integer(int_right)) => int_left == int_right,
(Self::Integer(int), Self::Float(float)) | (Self::Float(float), Self::Integer(int)) => {
int_float_cmp(*int, *float).is_eq()
sqlite_int_float_compare(*int, *float).is_eq()
}
(Self::Float(float_left), Self::Float(float_right)) => float_left == float_right,
(Self::Integer(_) | Self::Float(_), Self::Text(_) | Self::Blob(_)) => false,
@@ -682,32 +679,17 @@ impl PartialEq<Value> for Value {
}
}

fn int_float_cmp(int: i64, float: f64) -> std::cmp::Ordering {
if float.is_nan() {
return std::cmp::Ordering::Greater;
}

if float < -9223372036854775808.0 {
return std::cmp::Ordering::Greater;
}

if float >= 9223372036854775808.0 {
return std::cmp::Ordering::Less;
}

match int.cmp(&(float as i64)) {
std::cmp::Ordering::Equal => (int as f64).total_cmp(&float),
cmp => cmp,
}
}

#[allow(clippy::non_canonical_partial_ord_impl)]
impl PartialOrd<Value> for Value {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
match (self, other) {
(Self::Integer(int_left), Self::Integer(int_right)) => int_left.partial_cmp(int_right),
(Self::Float(float), Self::Integer(int)) => Some(int_float_cmp(*int, *float).reverse()),
(Self::Integer(int), Self::Float(float)) => Some(int_float_cmp(*int, *float)),
(Self::Float(float), Self::Integer(int)) => {
Some(sqlite_int_float_compare(*int, *float).reverse())
}
(Self::Integer(int), Self::Float(float)) => {
Some(sqlite_int_float_compare(*int, *float))
}
(Self::Float(float_left), Self::Float(float_right)) => {
float_left.partial_cmp(float_right)
}

241
core/util.rs
@@ -1,5 +1,6 @@
#![allow(unused)]
use crate::incremental::view::IncrementalView;
use crate::numeric::StrToF64;
use crate::translate::expr::WalkControl;
use crate::types::IOResult;
use crate::{
@@ -169,139 +170,22 @@ pub fn parse_schema_rows(
StepResult::Row => {
let row = rows.row().unwrap();
let ty = row.get::<&str>(0)?;
match ty {
"table" => {
let root_page: i64 = row.get::<i64>(3)?;
let sql: &str = row.get::<&str>(4)?;
let sql_bytes = sql.as_bytes();
if root_page == 0
&& contains_ignore_ascii_case!(sql_bytes, b"create virtual")
{
let name: &str = row.get::<&str>(1)?;
// a virtual table is found in the sqlite_schema, but it's no
// longer in the in-memory schema. We need to recreate it if
// the module is loaded in the symbol table.
let vtab = if let Some(vtab) = syms.vtabs.get(name) {
vtab.clone()
} else {
let mod_name = module_name_from_sql(sql)?;
crate::VirtualTable::table(
Some(name),
mod_name,
module_args_from_sql(sql)?,
syms,
)?
};
schema.add_virtual_table(vtab);
} else {
let table = schema::BTreeTable::from_sql(sql, root_page as usize)?;

// Check if this is a DBSP state table
if table.name.starts_with(DBSP_TABLE_PREFIX) {
// Extract the view name from __turso_internal_dbsp_state_<viewname>
let view_name = table
.name
.strip_prefix(DBSP_TABLE_PREFIX)
.unwrap()
.to_string();
dbsp_state_roots.insert(view_name, root_page as usize);
}

schema.add_btree_table(Arc::new(table));
}
}
"index" => {
let root_page: i64 = row.get::<i64>(3)?;
match row.get::<&str>(4) {
Ok(sql) => {
from_sql_indexes.push(UnparsedFromSqlIndex {
table_name: row.get::<&str>(2)?.to_string(),
root_page: root_page as usize,
sql: sql.to_string(),
});
}
_ => {
// Automatic index on primary key and/or unique constraint, e.g.
// table|foo|foo|2|CREATE TABLE foo (a text PRIMARY KEY, b)
// index|sqlite_autoindex_foo_1|foo|3|
let index_name = row.get::<&str>(1)?.to_string();
let table_name = row.get::<&str>(2)?.to_string();
let root_page = row.get::<i64>(3)?;
match automatic_indices.entry(table_name) {
std::collections::hash_map::Entry::Vacant(e) => {
e.insert(vec![(index_name, root_page as usize)]);
}
std::collections::hash_map::Entry::Occupied(mut e) => {
e.get_mut().push((index_name, root_page as usize));
}
}
}
}
}
"view" => {
use crate::incremental::view::IncrementalView;
use crate::schema::View;
use fallible_iterator::FallibleIterator;
use turso_parser::ast::{Cmd, Stmt};
use turso_parser::parser::Parser;

let name: &str = row.get::<&str>(1)?;
let root_page = row.get::<i64>(3)?;
let sql: &str = row.get::<&str>(4)?;
let view_name = name.to_string();

// Parse the SQL to determine if it's a regular or materialized view
let mut parser = Parser::new(sql.as_bytes());
if let Ok(Some(Cmd::Stmt(stmt))) = parser.next_cmd() {
match stmt {
Stmt::CreateMaterializedView { .. } => {
// Store materialized view info for later creation
// We'll handle reuse logic and create the actual IncrementalView
// in a later pass when we have both the main root page and DBSP state root
materialized_view_info.insert(
view_name.clone(),
(sql.to_string(), root_page as usize),
);

// Mark the existing view for potential reuse
if existing_views.contains_key(&view_name) {
// We'll check for reuse in the third pass
}
}
Stmt::CreateView {
view_name: _,
columns: column_names,
select,
..
} => {
// Extract actual columns from the SELECT statement
let view_columns =
crate::util::extract_view_columns(&select, schema);

// If column names were provided in CREATE VIEW (col1, col2, ...),
// use them to rename the columns
let mut final_columns = view_columns;
for (i, indexed_col) in column_names.iter().enumerate() {
if let Some(col) = final_columns.get_mut(i) {
col.name = Some(indexed_col.col_name.to_string());
}
}

// Create regular view
let view = View {
name: name.to_string(),
sql: sql.to_string(),
select_stmt: select,
columns: final_columns,
};
schema.add_view(view);
}
_ => {}
}
}
}
_ => continue,
}
let name = row.get::<&str>(1)?;
let table_name = row.get::<&str>(2)?;
let root_page = row.get::<i64>(3)?;
let sql = row.get::<&str>(4).ok();
schema.handle_schema_row(
ty,
name,
table_name,
root_page,
sql,
syms,
&mut from_sql_indexes,
&mut automatic_indices,
&mut dbsp_state_roots,
&mut materialized_view_info,
)?;
}
StepResult::IO => {
// TODO: How do we ensure that the I/O we submitted to
@@ -313,88 +197,9 @@ pub fn parse_schema_rows(
StepResult::Busy => break,
}
}
for unparsed_sql_from_index in from_sql_indexes {
if !schema.indexes_enabled() {
schema.table_set_has_index(&unparsed_sql_from_index.table_name);
} else {
let table = schema
.get_btree_table(&unparsed_sql_from_index.table_name)
.unwrap();
let index = schema::Index::from_sql(
&unparsed_sql_from_index.sql,
unparsed_sql_from_index.root_page,
table.as_ref(),
)?;
schema.add_index(Arc::new(index));
}
}
for automatic_index in automatic_indices {
if !schema.indexes_enabled() {
schema.table_set_has_index(&automatic_index.0);
} else {
let table = schema.get_btree_table(&automatic_index.0).unwrap();
let ret_index = schema::Index::automatic_from_primary_key_and_unique(
table.as_ref(),
automatic_index.1,
)?;
for index in ret_index {
schema.add_index(Arc::new(index));
}
}
}

// Third pass: Create materialized views now that we have both root pages
for (view_name, (sql, main_root)) in materialized_view_info {
// Look up the DBSP state root for this view - must exist for materialized views
let dbsp_state_root = dbsp_state_roots.get(&view_name).ok_or_else(|| {
LimboError::InternalError(format!(
"Materialized view {view_name} is missing its DBSP state table"
))
})?;

// Check if we can reuse the existing view
let mut reuse_view = false;
if let Some(existing_view_mutex) = schema.get_materialized_view(&view_name) {
let existing_view = existing_view_mutex.lock().unwrap();
if let Some(existing_sql) = schema.materialized_view_sql.get(&view_name) {
if existing_sql == &sql {
reuse_view = true;
}
}
}

if reuse_view {
// View already exists with same SQL, just update dependencies
let existing_view_mutex = schema.get_materialized_view(&view_name).unwrap();
let existing_view = existing_view_mutex.lock().unwrap();
let referenced_tables = existing_view.get_referenced_table_names();
drop(existing_view); // Release lock before modifying schema
for table_name in referenced_tables {
schema.add_materialized_view_dependency(&table_name, &view_name);
}
} else {
// Create new IncrementalView with both root pages
let incremental_view =
IncrementalView::from_sql(&sql, schema, main_root, *dbsp_state_root)?;
let referenced_tables = incremental_view.get_referenced_table_names();

// Create a Table for the materialized view
let table = Arc::new(schema::Table::BTree(Arc::new(schema::BTreeTable {
root_page: main_root,
name: view_name.clone(),
columns: incremental_view.columns.clone(), // Use the view's columns, not the base table's
primary_key_columns: vec![],
has_rowid: true,
is_strict: false,
unique_sets: None,
})));

schema.add_materialized_view(incremental_view, table, sql.clone());
for table_name in referenced_tables {
schema.add_materialized_view_dependency(&table_name, &view_name);
}
}
}
schema.populate_indices(from_sql_indexes, automatic_indices)?;
schema.populate_materialized_views(materialized_view_info, dbsp_state_roots)?;

Ok(())
}
@@ -1185,8 +990,12 @@ pub fn parse_numeric_literal(text: &str) -> Result<Value> {
return Ok(Value::Integer(int_value));
}

let float_value = text.parse::<f64>()?;
Ok(Value::Float(float_value))
let Some(StrToF64::Fractional(float) | StrToF64::Decimal(float)) =
crate::numeric::str_to_f64(text)
else {
unreachable!();
};
Ok(Value::Float(float.into()))
}

pub fn parse_signed_number(expr: &Expr) -> Result<Value> {

@@ -2187,9 +2187,14 @@ pub fn op_transaction(
match pager.begin_write_tx()? {
IOResult::Done(r) => {
if let LimboResult::Busy = r {
pager.end_read_tx()?;
conn.transaction_state.replace(TransactionState::None);
conn.auto_commit.replace(true);
// We failed to upgrade to write transaction so put the transaction into its original state.
// That is, if the transaction had not started, end the read transaction so that next time we
// start a new one.
if matches!(current_state, TransactionState::None) {
pager.end_read_tx()?;
conn.transaction_state.replace(TransactionState::None);
}
assert_eq!(conn.transaction_state.get(), current_state);
return Ok(InsnFunctionStepResult::Busy);
}
}
@@ -2696,6 +2701,7 @@ pub enum OpSeekKey {
IndexKeyFromRegister(usize),
}

#[derive(Debug)]
pub enum OpSeekState {
/// Initial state
Start,
@@ -3006,12 +3012,21 @@ pub fn seek_internal(

// this same logic applies for indexes, but the next/prev record is expected to be found in the parent page's
// divider cell.
turso_assert!(
!cursor.skip_advance.get(),
"skip_advance should not be true in the middle of a seek operation"
);
let result = match op {
// deliberately call get_next_record() instead of next() to avoid skip_advance triggering unintentionally
SeekOp::GT | SeekOp::GE { .. } => cursor.next()?,
|
||||
SeekOp::LT | SeekOp::LE { .. } => cursor.prev()?,
|
||||
};
|
||||
match result {
|
||||
IOResult::Done(found) => found,
|
||||
IOResult::Done(found) => {
|
||||
cursor.has_record.set(found);
|
||||
cursor.invalidate_record();
|
||||
found
|
||||
}
|
||||
IOResult::IO(io) => return Ok(SeekInternalResult::IO(io)),
|
||||
}
|
||||
};
|
||||
@@ -5692,6 +5707,7 @@ pub fn op_idx_delete(
|
||||
);
|
||||
|
||||
loop {
|
||||
#[cfg(debug_assertions)]
|
||||
tracing::debug!(
|
||||
"op_idx_delete(cursor_id={}, start_reg={}, num_regs={}, rootpage={}, state={:?})",
|
||||
cursor_id,
|
||||
@@ -5725,9 +5741,11 @@ pub fn op_idx_delete(
|
||||
// If P5 is not zero, then raise an SQLITE_CORRUPT_INDEX error if no matching index entry is found
|
||||
// Also, do not raise this (self-correcting and non-critical) error if in writable_schema mode.
|
||||
if *raise_error_if_no_matching_entry {
|
||||
let record = make_record(&state.registers, start_reg, num_regs);
|
||||
let reg_values = (*start_reg..*start_reg + *num_regs)
|
||||
.map(|i| &state.registers[i])
|
||||
.collect::<Vec<_>>();
|
||||
return Err(LimboError::Corrupt(format!(
|
||||
"IdxDelete: no matching index entry found for record {record:?}"
|
||||
"IdxDelete: no matching index entry found for key {reg_values:?}"
|
||||
)));
|
||||
}
|
||||
state.pc += 1;
|
||||
@@ -5744,9 +5762,11 @@ pub fn op_idx_delete(
|
||||
};
|
||||
|
||||
if rowid.is_none() && *raise_error_if_no_matching_entry {
|
||||
let reg_values = (*start_reg..*start_reg + *num_regs)
|
||||
.map(|i| &state.registers[i])
|
||||
.collect::<Vec<_>>();
|
||||
return Err(LimboError::Corrupt(format!(
|
||||
"IdxDelete: no matching index entry found for record {:?}",
|
||||
make_record(&state.registers, start_reg, num_regs)
|
||||
"IdxDelete: no matching index entry found for key {reg_values:?}"
|
||||
)));
|
||||
}
|
||||
state.op_idx_delete_state = Some(OpIdxDeleteState::Deleting);
|
||||
@@ -7344,6 +7364,9 @@ pub fn op_integrity_check(
|
||||
let mut current_root_idx = 0;
|
||||
// check freelist pages first, if there are any for database
|
||||
if freelist_trunk_page > 0 {
|
||||
let expected_freelist_count =
|
||||
return_if_io!(pager.with_header(|header| header.freelist_pages.get()));
|
||||
integrity_check_state.set_expected_freelist_count(expected_freelist_count as usize);
|
||||
integrity_check_state.start(
|
||||
freelist_trunk_page as usize,
|
||||
PageCategory::FreeListTrunk,
|
||||
@@ -7370,6 +7393,14 @@ pub fn op_integrity_check(
|
||||
*current_root_idx += 1;
|
||||
return Ok(InsnFunctionStepResult::Step);
|
||||
} else {
|
||||
if integrity_check_state.freelist_count.actual_count
|
||||
!= integrity_check_state.freelist_count.expected_count
|
||||
{
|
||||
errors.push(IntegrityCheckError::FreelistCountMismatch {
|
||||
actual_count: integrity_check_state.freelist_count.actual_count,
|
||||
expected_count: integrity_check_state.freelist_count.expected_count,
|
||||
});
|
||||
}
|
||||
let message = if errors.is_empty() {
|
||||
"ok".to_string()
|
||||
} else {
|
||||
@@ -8187,27 +8218,19 @@ impl Value {
        }
    }

-    fn to_f64(&self) -> Option<f64> {
-        match self {
-            Value::Integer(i) => Some(*i as f64),
-            Value::Float(f) => Some(*f),
-            Value::Text(t) => t.as_str().parse::<f64>().ok(),
-            _ => None,
-        }
-    }
-
    fn exec_math_unary(&self, function: &MathFunc) -> Value {
+        let v = Numeric::from_value_strict(self);
+
        // In case of some functions and integer input, return the input as is
-        if let Value::Integer(_) = self {
+        if let Numeric::Integer(i) = v {
            if matches! { function, MathFunc::Ceil | MathFunc::Ceiling | MathFunc::Floor | MathFunc::Trunc }
            {
-                return self.clone();
+                return Value::Integer(i);
            }
        }

-        let f = match self.to_f64() {
-            Some(f) => f,
-            None => return Value::Null,
+        let Some(f) = v.try_into_f64() else {
+            return Value::Null;
        };

        let result = match function {
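The math hunks replace per-call to_f64 parsing with one strict conversion. A sketch of the shape, using a hypothetical Numeric that stands in for the crate's type (the real from_value_strict handles more cases):

#[derive(Clone, Copy)]
enum Numeric { Integer(i64), Float(f64), Null }

impl Numeric {
    // Strict conversion: only values that are already numeric become f64.
    fn try_into_f64(self) -> Option<f64> {
        match self {
            Numeric::Integer(i) => Some(i as f64),
            Numeric::Float(f) => Some(f),
            Numeric::Null => None,
        }
    }
}

// Mirrors the unary hunk: integer input to ceil/floor/trunc short-circuits,
// everything else is coerced once, and NULL propagates via let-else.
fn ceil_numeric(v: Numeric) -> Option<f64> {
    if let Numeric::Integer(i) = v {
        return Some(i as f64); // already integral; nothing to round
    }
    let Some(f) = v.try_into_f64() else {
        return None;
    };
    Some(f.ceil())
}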
@@ -8244,14 +8267,12 @@ impl Value {
    }

    fn exec_math_binary(&self, rhs: &Value, function: &MathFunc) -> Value {
-        let lhs = match self.to_f64() {
-            Some(f) => f,
-            None => return Value::Null,
+        let Some(lhs) = Numeric::from_value_strict(self).try_into_f64() else {
+            return Value::Null;
        };

-        let rhs = match rhs.to_f64() {
-            Some(f) => f,
-            None => return Value::Null,
+        let Some(rhs) = Numeric::from_value_strict(rhs).try_into_f64() else {
+            return Value::Null;
        };

        let result = match function {
@@ -8269,16 +8290,13 @@ impl Value {
    }

    fn exec_math_log(&self, base: Option<&Value>) -> Value {
-        let f = match self.to_f64() {
-            Some(f) => f,
-            None => return Value::Null,
+        let Some(f) = Numeric::from_value_strict(self).try_into_f64() else {
+            return Value::Null;
        };

-        let base = match base {
-            Some(base) => match base.to_f64() {
-                Some(f) => f,
-                None => return Value::Null,
-            },
+        let base = match base.map(|value| Numeric::from_value_strict(value).try_into_f64()) {
+            Some(Some(f)) => f,
+            Some(None) => return Value::Null,
            None => 10.0,
        };

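The log hunk folds the optional base into a single match: no base argument means base 10 (SQLite's one-argument log()), and a base that fails strict conversion yields NULL. A condensed sketch:

fn exec_log(x: f64, base: Option<Option<f64>>) -> Option<f64> {
    // Outer Option: was a base argument supplied at all?
    // Inner Option: did the supplied base convert to f64?
    let base = match base {
        Some(Some(b)) => b,
        Some(None) => return None, // non-numeric base propagates as NULL
        None => 10.0,              // SQLite's log(X) is base 10
    };
    Some(x.log(base))
}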
@@ -8359,11 +8377,9 @@ impl Value {

    pub fn exec_concat(&self, rhs: &Value) -> Value {
        if let (Value::Blob(lhs), Value::Blob(rhs)) = (self, rhs) {
-            return Value::build_text(String::from_utf8_lossy(dbg!(&[
-                lhs.as_slice(),
-                rhs.as_slice()
-            ]
-            .concat())));
+            return Value::build_text(String::from_utf8_lossy(
+                &[lhs.as_slice(), rhs.as_slice()].concat(),
+            ));
        }

        let Some(lhs) = self.cast_text() else {
@@ -238,12 +238,15 @@ impl Sorter {
InitChunkHeapState::Start => {
    let mut completions: Vec<Completion> = Vec::with_capacity(self.chunks.len());
    for chunk in self.chunks.iter_mut() {
-        let c = chunk.read().inspect_err(|_| {
-            for c in completions.iter() {
-                c.abort();
+        match chunk.read() {
+            Err(e) => {
+                tracing::error!("Failed to read chunk: {e}");
+                self.io.cancel(&completions)?;
+                self.io.drain()?;
+                return Err(e);
            }
-        })?;
-        completions.push(c);
+            Ok(c) => completions.push(c),
+        };
    }
    self.init_chunk_heap_state = InitChunkHeapState::PushChunk;
    io_yield_many!(completions);
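A sketch of the cleanup contract the Sorter hunk adopts, with toy Io and Completion types standing in for the real async I/O primitives: when one chunk read fails, cancel the completions already queued, drain in-flight I/O, then propagate the original error.

struct Completion(u32);
struct Io;

impl Io {
    // Stand-ins for the real async I/O API assumed by this sketch.
    fn cancel(&self, _pending: &[Completion]) -> Result<(), String> { Ok(()) }
    fn drain(&self) -> Result<(), String> { Ok(()) }
}

fn submit_all(io: &Io, reads: Vec<Result<Completion, String>>) -> Result<Vec<Completion>, String> {
    let mut completions = Vec::new();
    for read in reads {
        match read {
            Ok(c) => completions.push(c),
            Err(e) => {
                // Abort queued work and wait out in-flight I/O before
                // surfacing the original error, as in the hunk above.
                io.cancel(&completions)?;
                io.drain()?;
                return Err(e);
            }
        }
    }
    Ok(completions)
}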
fuzz/Cargo.lock (generated, 10 lines changed)
@@ -1182,7 +1182,7 @@ dependencies = [

[[package]]
name = "turso_core"
-version = "0.1.5-pre.3"
+version = "0.1.5"
dependencies = [
 "aegis",
 "aes",
@@ -1225,7 +1225,7 @@ dependencies = [

[[package]]
name = "turso_ext"
-version = "0.1.5-pre.3"
+version = "0.1.5"
dependencies = [
 "chrono",
 "getrandom 0.3.1",
@@ -1234,7 +1234,7 @@ dependencies = [

[[package]]
name = "turso_macros"
-version = "0.1.5-pre.3"
+version = "0.1.5"
dependencies = [
 "proc-macro2",
 "quote",
@@ -1243,7 +1243,7 @@ dependencies = [

[[package]]
name = "turso_parser"
-version = "0.1.5-pre.3"
+version = "0.1.5"
dependencies = [
 "bitflags",
 "miette",
@@ -1255,7 +1255,7 @@ dependencies = [

[[package]]
name = "turso_sqlite3_parser"
-version = "0.1.5-pre.3"
+version = "0.1.5"
dependencies = [
 "bitflags",
 "cc",
@@ -116,11 +116,26 @@ impl rusqlite::types::FromSql for Value {
    }
}

+str_enum! {
+    enum UnaryFunc {
+        Ceil => "ceil",
+        Floor => "floor",
+    }
+}
+
+str_enum! {
+    enum BinaryFunc {
+        Power => "pow",
+    }
+}
+
#[derive(Debug, Arbitrary)]
enum Expr {
    Value(Value),
    Binary(Binary, Box<Expr>, Box<Expr>),
    Unary(Unary, Box<Expr>),
+    UnaryFunc(UnaryFunc, Box<Expr>),
+    BinaryFunc(BinaryFunc, Box<Expr>, Box<Expr>),
}

#[derive(Debug)]
@@ -158,6 +173,26 @@ impl Expr {
                depth: lhs.depth.max(rhs.depth) + 1,
            }
        }
+        Expr::BinaryFunc(func, lhs, rhs) => {
+            let mut lhs = lhs.lower();
+            let mut rhs = rhs.lower();
+            Output {
+                query: format!("{func}({}, {})", lhs.query, rhs.query),
+                parameters: {
+                    lhs.parameters.append(&mut rhs.parameters);
+                    lhs.parameters
+                },
+                depth: lhs.depth.max(rhs.depth) + 1,
+            }
+        }
+        Expr::UnaryFunc(func, expr) => {
+            let expr = expr.lower();
+            Output {
+                query: format!("{func}({})", expr.query),
+                parameters: expr.parameters,
+                depth: expr.depth + 1,
+            }
+        }
    }
}
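The added arms lower each generated node into a SQL fragment plus its flattened parameter list. A condensed standalone sketch of that lowering, with a simplified Expr and Output (the harness's real types carry more variants):

struct Output { query: String, parameters: Vec<i64>, depth: usize }

enum Expr {
    Value(i64),
    UnaryFunc(&'static str, Box<Expr>),
    BinaryFunc(&'static str, Box<Expr>, Box<Expr>),
}

impl Expr {
    fn lower(&self) -> Output {
        match self {
            // Leaves become a placeholder plus one bound parameter.
            Expr::Value(v) => Output { query: "?".into(), parameters: vec![*v], depth: 0 },
            Expr::UnaryFunc(func, expr) => {
                let expr = expr.lower();
                Output {
                    query: format!("{func}({})", expr.query),
                    parameters: expr.parameters,
                    depth: expr.depth + 1,
                }
            }
            Expr::BinaryFunc(func, lhs, rhs) => {
                let mut lhs = lhs.lower();
                let mut rhs = rhs.lower();
                Output {
                    // Parameter order matches placeholder order: lhs first, then rhs.
                    query: format!("{func}({}, {})", lhs.query, rhs.query),
                    parameters: {
                        lhs.parameters.append(&mut rhs.parameters);
                        lhs.parameters
                    },
                    depth: lhs.depth.max(rhs.depth) + 1,
                }
            }
        }
    }
}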
@@ -223,6 +223,17 @@ export class Connection {
    this.isOpen = false;
    await this.session.close();
  }

+  async reconnect(): Promise<void> {
+    try {
+      if (this.isOpen) {
+        await this.close();
+      }
+    } finally {
+      this.session = new Session(this.config);
+      this.isOpen = true;
+    }
+  }
}

/**
scripts/clean_interactions.sh (new executable file, 39 lines)
@@ -0,0 +1,39 @@
#!/usr/bin/env bash
set -euo pipefail

# Clean lines from simulator output by:
# 1) Removing everything up to and including "interaction="
# 2) Replacing everything from "}:" to the end with a single semicolon
# 3) Only retaining lines containing CREATE/INSERT/UPDATE/DELETE/DROP (the rest are usually meaningless for debugging)
#
# The purpose of this is to transform the interaction plan into a list of executable SQL statements
# in cases where:
# 1. Shrinking the plan failed
# 2. We know the point at which the simulator failure occurred.
#
# I use this script like this in the simulator directory:
# cargo run &> raw_output.txt
# manually edit out the shrinking parts and the WarGames intro graphics etc and save the file
# then run:
# ./clean_interactions.sh raw_output.txt > interactions.sql
#
# Usage:
# clean_interactions.sh INPUT [OUTPUT]
#
# If OUTPUT is omitted, the result is written to stdout.

if [[ $# -lt 1 || $# -gt 2 ]]; then
    echo "Usage: $0 INPUT [OUTPUT]" >&2
    exit 1
fi

input_path="$1"
output_path="${2:-}"

if [[ -z "${output_path}" ]]; then
    awk '{ line=$0; sub(/^[^\n]*interaction=/, "", line); sub(/}:.*/, ";", line); print line }' "${input_path}" | grep -E 'CREATE|INSERT|UPDATE|DELETE|DROP'
else
    awk '{ line=$0; sub(/^[^\n]*interaction=/, "", line); sub(/}:.*/, ";", line); print line }' "${input_path}" | grep -E 'CREATE|INSERT|UPDATE|DELETE|DROP' > "${output_path}"
fi
Some files were not shown because too many files have changed in this diff.