turso-sync-js package

Author: Nikita Sivukhin
Date: 2025-08-08 01:16:59 +04:00
Parent: 405e7f56a6
Commit: e6a3ee59d2
24 changed files with 5875 additions and 0 deletions

Cargo.lock

@@ -4381,6 +4381,20 @@ dependencies = [
"uuid",
]
[[package]]
name = "turso_sync_js"
version = "0.1.4-pre.5"
dependencies = [
"genawaiter",
"http",
"napi",
"napi-build",
"napi-derive",
"turso_core",
"turso_node",
"turso_sync_engine",
]
[[package]]
name = "typenum"
version = "1.18.0"


@@ -28,6 +28,7 @@ members = [
"vendored/sqlite3-parser/sqlparser_bench",
"packages/turso-sync",
"packages/turso-sync-engine",
"packages/turso-sync-js",
]
exclude = ["perf/latency/limbo"]
@@ -42,6 +43,7 @@ repository = "https://github.com/tursodatabase/turso"
turso = { path = "bindings/rust", version = "0.1.4-pre.5" }
limbo_completion = { path = "extensions/completion", version = "0.1.4-pre.5" }
turso_core = { path = "core", version = "0.1.4-pre.5" }
turso_sync_engine = { path = "packages/turso-sync-engine", version = "0.1.4-pre.5" }
limbo_crypto = { path = "extensions/crypto", version = "0.1.4-pre.5" }
limbo_csv = { path = "extensions/csv", version = "0.1.4-pre.5" }
turso_ext = { path = "extensions/core", version = "0.1.4-pre.5" }


@@ -0,0 +1,2 @@
[target.x86_64-pc-windows-msvc]
rustflags = ["-C", "target-feature=+crt-static"]


@@ -0,0 +1,20 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["config:base", "group:allNonMajor", ":preserveSemverRanges", ":disablePeerDependencies"],
"labels": ["dependencies"],
"packageRules": [
{
"matchPackageNames": ["@napi/cli", "napi", "napi-build", "napi-derive"],
"addLabels": ["napi-rs"],
"groupName": "napi-rs"
},
{
"matchPackagePatterns": ["^eslint", "^@typescript-eslint"],
"groupName": "linter"
}
],
"commitMessagePrefix": "chore: ",
"commitMessageAction": "bump up",
"commitMessageTopic": "{{depName}} version",
"ignoreDeps": []
}

packages/turso-sync-js/.gitignore

@@ -0,0 +1,132 @@
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# End of https://www.toptal.com/developers/gitignore/api/node
#Added by cargo
/target
Cargo.lock
*.node
*.wasm
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
/npm

File diff suppressed because one or more lines are too long


@@ -0,0 +1,5 @@
nodeLinker: node-modules
npmAuditRegistry: "https://registry.npmjs.org"
yarnPath: .yarn/releases/yarn-4.9.2.cjs


@@ -0,0 +1,22 @@
[package]
name = "turso_sync_js"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
[lib]
crate-type = ["cdylib"]
[dependencies]
http = "1.3.1"
napi = { version = "3.1.3", default-features = false, features = ["napi6", "dyn-symbols"] }
napi-derive = { version = "3.1.1", default-features = true }
turso_sync_engine = { workspace = true }
turso_core = { workspace = true }
turso_node = { path = "../../bindings/javascript" }
genawaiter = { version = "0.99.1", default-features = false }
[build-dependencies]
napi-build = "2.2.3"


@@ -0,0 +1 @@
# turso-sync-js package


@@ -0,0 +1 @@
export * from 'turso-sync-js-wasm32-wasi'


@@ -0,0 +1,3 @@
fn main() {
napi_build::setup();
}

packages/turso-sync-js/index.d.ts

@@ -0,0 +1,161 @@
/* auto-generated by NAPI-RS */
/* eslint-disable */
/** A database connection. */
export declare class Database {
/**
* Creates a new database instance.
*
* # Arguments
* * `path` - The path to the database file.
*/
constructor(path: string)
/** Returns whether the database is in memory-only mode. */
get memory(): boolean
/**
* Executes a batch of SQL statements.
*
* # Arguments
*
* * `sql` - The SQL statements to execute.
*
* # Returns
*/
batch(sql: string): void
/**
* Prepares a statement for execution.
*
* # Arguments
*
* * `sql` - The SQL statement to prepare.
*
* # Returns
*
* A `Statement` instance.
*/
prepare(sql: string): Statement
/**
* Returns the rowid of the last row inserted.
*
* # Returns
*
* The rowid of the last row inserted.
*/
lastInsertRowid(): number
/**
* Returns the number of changes made by the last statement.
*
* # Returns
*
* The number of changes made by the last statement.
*/
changes(): number
/**
* Returns the total number of changes made by all statements.
*
* # Returns
*
* The total number of changes made by all statements.
*/
totalChanges(): number
/**
* Closes the database connection.
*
* # Returns
*
* `Ok(())` if the database is closed successfully.
*/
close(): void
/** Runs the I/O loop synchronously. */
ioLoopSync(): void
/** Runs the I/O loop asynchronously, returning a Promise. */
ioLoopAsync(): Promise<void>
}
/** A prepared statement. */
export declare class Statement {
reset(): void
/** Returns the number of parameters in the statement. */
parameterCount(): number
/**
* Returns the name of a parameter at a specific 1-based index.
*
* # Arguments
*
* * `index` - The 1-based parameter index.
*/
parameterName(index: number): string | null
/**
* Binds a parameter at a specific 1-based index.
*
* # Arguments
*
* * `index` - The 1-based parameter index.
* * `value` - The value to bind.
*/
bindAt(index: number, value: unknown): void
/**
* Step the statement and return result code:
* 1 = Row available, 2 = Done, 3 = I/O needed
*/
step(): number
/** Get the current row data according to the presentation mode */
row(): unknown
/** Sets the presentation mode to raw. */
raw(raw?: boolean | undefined | null): void
/** Sets the presentation mode to pluck. */
pluck(pluck?: boolean | undefined | null): void
/** Finalizes the statement. */
finalize(): void
}
export declare class GeneratorHolder {
resume(error?: string | undefined | null): number
}
export declare class JsDataCompletion {
poison(err: string): void
status(value: number): void
push(value: Buffer): void
done(): void
}
export declare class JsDataPollResult {
}
export declare class JsProtocolIo {
takeRequest(): JsProtocolRequestData | null
}
export declare class JsProtocolRequestData {
request(): JsProtocolRequest
completion(): JsDataCompletion
}
export declare class SyncEngine {
constructor(opts: SyncEngineOpts)
init(): GeneratorHolder
ioLoopSync(): void
/** Runs the I/O loop asynchronously, returning a Promise. */
ioLoopAsync(): Promise<void>
protocolIo(): JsProtocolRequestData | null
sync(): GeneratorHolder
push(): GeneratorHolder
pull(): GeneratorHolder
open(): Database
}
export interface DatabaseOpts {
path: string
}
export type JsProtocolRequest =
| { type: 'Http', method: string, path: string, body?: Buffer }
| { type: 'FullRead', path: string }
| { type: 'FullWrite', path: string, content: Buffer }
export interface SyncEngineOpts {
path: string
clientName?: string
walPullBatchSize?: number
}
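
A minimal usage sketch of the declarations above, assuming the step codes documented for Statement.step() (1 = row available, 2 = done, 3 = I/O needed); the database path and query are placeholders:

```js
// Hypothetical driver for the low-level API declared above; path and SQL are placeholders.
const { Database } = require('turso-sync-js')

const STEP_ROW = 1   // step(): a row is available
const STEP_DONE = 2  // step(): statement finished
const STEP_IO = 3    // step(): I/O needed before progress can be made

const db = new Database('local.db')
const stmt = db.prepare('SELECT 1 AS one')
for (;;) {
  const rc = stmt.step()
  if (rc === STEP_IO) { db.ioLoopSync(); continue }         // run pending I/O, then retry
  if (rc === STEP_ROW) { console.log(stmt.row()); continue } // collect the current row
  break                                                      // STEP_DONE
}
stmt.finalize()
db.close()
```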


@@ -0,0 +1,403 @@
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */
const { createRequire } = require('node:module')
require = createRequire(__filename)
const { readFileSync } = require('node:fs')
let nativeBinding = null
const loadErrors = []
const isMusl = () => {
let musl = false
if (process.platform === 'linux') {
musl = isMuslFromFilesystem()
if (musl === null) {
musl = isMuslFromReport()
}
if (musl === null) {
musl = isMuslFromChildProcess()
}
}
return musl
}
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')
const isMuslFromFilesystem = () => {
try {
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
} catch {
return null
}
}
const isMuslFromReport = () => {
let report = null
if (typeof process.report?.getReport === 'function') {
process.report.excludeNetwork = true
report = process.report.getReport()
}
if (!report) {
return null
}
if (report.header && report.header.glibcVersionRuntime) {
return false
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return true
}
}
return false
}
const isMuslFromChildProcess = () => {
try {
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
} catch (e) {
// If we reach this case, we don't know whether the system is musl or not, so it's better to just fall back to false
return false
}
}
function requireNative() {
if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
try {
nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
} catch (err) {
loadErrors.push(err)
}
} else if (process.platform === 'android') {
if (process.arch === 'arm64') {
try {
return require('./turso-sync-js.android-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-android-arm64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm') {
try {
return require('./turso-sync-js.android-arm-eabi.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-android-arm-eabi')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
}
} else if (process.platform === 'win32') {
if (process.arch === 'x64') {
try {
return require('./turso-sync-js.win32-x64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-win32-x64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'ia32') {
try {
return require('./turso-sync-js.win32-ia32-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-win32-ia32-msvc')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./turso-sync-js.win32-arm64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-win32-arm64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
}
} else if (process.platform === 'darwin') {
try {
return require('./turso-sync-js.darwin-universal.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-darwin-universal')
} catch (e) {
loadErrors.push(e)
}
if (process.arch === 'x64') {
try {
return require('./turso-sync-js.darwin-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-darwin-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./turso-sync-js.darwin-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-darwin-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
}
} else if (process.platform === 'freebsd') {
if (process.arch === 'x64') {
try {
return require('./turso-sync-js.freebsd-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-freebsd-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./turso-sync-js.freebsd-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-freebsd-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
}
} else if (process.platform === 'linux') {
if (process.arch === 'x64') {
if (isMusl()) {
try {
return require('./turso-sync-js.linux-x64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-x64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./turso-sync-js.linux-x64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-x64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm64') {
if (isMusl()) {
try {
return require('./turso-sync-js.linux-arm64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-arm64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./turso-sync-js.linux-arm64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-arm64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm') {
if (isMusl()) {
try {
return require('./turso-sync-js.linux-arm-musleabihf.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-arm-musleabihf')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./turso-sync-js.linux-arm-gnueabihf.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-arm-gnueabihf')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'riscv64') {
if (isMusl()) {
try {
return require('./turso-sync-js.linux-riscv64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-riscv64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
return require('./turso-sync-js.linux-riscv64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-riscv64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'ppc64') {
try {
return require('./turso-sync-js.linux-ppc64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-ppc64-gnu')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 's390x') {
try {
return require('./turso-sync-js.linux-s390x-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-s390x-gnu')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
}
} else if (process.platform === 'openharmony') {
if (process.arch === 'arm64') {
try {
return require('./turso-sync-js.linux-arm64-ohos.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-arm64-ohos')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'x64') {
try {
return require('./turso-sync-js.linux-x64-ohos.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-x64-ohos')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm') {
try {
return require('./turso-sync-js.linux-arm-ohos.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('turso-sync-js-linux-arm-ohos')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`))
}
} else {
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
}
}
nativeBinding = requireNative()
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
try {
nativeBinding = require('./turso-sync-js.wasi.cjs')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
}
if (!nativeBinding) {
try {
nativeBinding = require('turso-sync-js-wasm32-wasi')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
loadErrors.push(err)
}
}
}
}
if (!nativeBinding) {
if (loadErrors.length > 0) {
throw new Error(
`Cannot find native binding. ` +
`npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
{ cause: loadErrors }
)
}
throw new Error(`Failed to load native binding`)
}
module.exports = nativeBinding
module.exports.Database = nativeBinding.Database
module.exports.Statement = nativeBinding.Statement
module.exports.GeneratorHolder = nativeBinding.GeneratorHolder
module.exports.JsDataCompletion = nativeBinding.JsDataCompletion
module.exports.JsDataPollResult = nativeBinding.JsDataPollResult
module.exports.JsProtocolIo = nativeBinding.JsProtocolIo
module.exports.JsProtocolRequestData = nativeBinding.JsProtocolRequestData
module.exports.SyncEngine = nativeBinding.SyncEngine


@@ -0,0 +1,101 @@
{
"name": "turso-sync-js",
"version": "1.0.0",
"description": "Template project for writing node package with napi-rs",
"main": "index.js",
"repository": "git@github.com:napi-rs/package-template.git",
"license": "MIT",
"keywords": [
"napi-rs",
"NAPI",
"N-API",
"Rust",
"node-addon",
"node-addon-api"
],
"files": [
"index.d.ts",
"index.js"
],
"napi": {
"binaryName": "turso-sync-js",
"targets": [
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
"aarch64-apple-darwin",
"wasm32-wasip1-threads"
]
},
"engines": {
"node": ">= 6.14.2 < 7 || >= 8.11.2 < 9 || >= 9.11.0 < 10 || >= 10.0.0"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/",
"access": "public"
},
"scripts": {
"artifacts": "napi artifacts",
"bench": "node --import @oxc-node/core/register benchmark/bench.ts && tsc -p tsconfig.json",
"build": "napi build --platform --release --esm",
"build:debug": "napi build --platform",
"format": "run-p format:prettier format:rs format:toml",
"format:prettier": "prettier . -w",
"format:toml": "taplo format",
"format:rs": "cargo fmt",
"lint": "oxlint .",
"prepublishOnly": "napi prepublish -t npm",
"test": "ava",
"version": "napi version",
"prepare": "husky"
},
"devDependencies": {
"@emnapi/core": "^1.4.5",
"@emnapi/runtime": "^1.4.5",
"@napi-rs/cli": "^3.0.3",
"@oxc-node/core": "^0.0.30",
"@taplo/cli": "^0.7.0",
"@tybys/wasm-util": "^0.10.0",
"ava": "^6.4.1",
"chalk": "^5.4.1",
"husky": "^9.1.7",
"lint-staged": "^16.1.2",
"npm-run-all2": "^8.0.4",
"oxlint": "^1.8.0",
"prettier": "^3.6.2",
"tinybench": "^4.0.1",
"typescript": "^5.8.3"
},
"lint-staged": {
"*.@(js|ts|tsx)": [
"oxlint --fix"
],
"*.@(js|ts|tsx|yml|yaml|md|json)": [
"prettier --write"
],
"*.toml": [
"taplo format"
]
},
"ava": {
"extensions": {
"ts": "module"
},
"timeout": "2m",
"workerThreads": false,
"environmentVariables": {
"OXC_TSCONFIG_PATH": "./__test__/tsconfig.json"
},
"nodeArguments": [
"--import",
"@oxc-node/core/register"
]
},
"prettier": {
"printWidth": 120,
"semi": false,
"trailingComma": "all",
"singleQuote": true,
"arrowParens": "always"
},
"packageManager": "yarn@4.9.2"
}


@@ -0,0 +1,45 @@
use napi_derive::napi;
use std::{future::Future, sync::Mutex};
use turso_sync_engine::types::ProtocolCommand;
pub const GENERATOR_RESUME_IO: u32 = 0;
pub const GENERATOR_RESUME_DONE: u32 = 1;
pub trait Generator {
fn resume(&mut self, result: Option<String>) -> napi::Result<u32>;
}
impl<F: Future<Output = turso_sync_engine::Result<()>>> Generator
for genawaiter::sync::Gen<ProtocolCommand, turso_sync_engine::Result<()>, F>
{
fn resume(&mut self, error: Option<String>) -> napi::Result<u32> {
let result = match error {
Some(err) => Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
format!("JsProtocolIo error: {err}"),
)),
None => Ok(()),
};
match self.resume_with(result) {
genawaiter::GeneratorState::Yielded(ProtocolCommand::IO) => Ok(GENERATOR_RESUME_IO),
genawaiter::GeneratorState::Complete(Ok(())) => Ok(GENERATOR_RESUME_DONE),
genawaiter::GeneratorState::Complete(Err(err)) => Err(napi::Error::new(
napi::Status::GenericFailure,
format!("sync engine operation failed: {err}"),
)),
}
}
}
#[napi]
pub struct GeneratorHolder {
pub(crate) inner: Box<Mutex<dyn Generator>>,
}
#[napi]
impl GeneratorHolder {
#[napi]
pub fn resume(&self, error: Option<String>) -> napi::Result<u32> {
self.inner.lock().unwrap().resume(error)
}
}
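
On the JavaScript side, a GeneratorHolder is driven by calling resume() in a loop until it reports completion; passing a string instead of null injects an error into the suspended operation. A minimal driver sketch using the two codes above (it omits serving protocol requests, which the full driver in sync.js handles):

```js
// Minimal sketch of driving a GeneratorHolder (e.g. the one returned by engine.sync()).
const GENERATOR_RESUME_IO = 0    // generator yielded ProtocolCommand::IO
const GENERATOR_RESUME_DONE = 1  // generator completed successfully

async function drive(engine, holder) {
  // resume(null) advances the generator; resume("message") makes the pending step fail with that error
  while (holder.resume(null) !== GENERATOR_RESUME_DONE) {
    await engine.ioLoopAsync()   // let core I/O make progress before resuming again
  }
}
```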


@@ -0,0 +1,195 @@
#![deny(clippy::all)]
use std::{
collections::VecDeque,
sync::{Arc, Mutex, MutexGuard},
};
use napi::bindgen_prelude::*;
use napi_derive::napi;
use turso_sync_engine::protocol_io::{DataCompletion, DataPollResult, ProtocolIO};
#[napi]
pub enum JsProtocolRequest {
Http {
method: String,
path: String,
body: Option<Buffer>,
},
FullRead {
path: String,
},
FullWrite {
path: String,
content: Buffer,
},
}
#[derive(Clone)]
#[napi]
pub struct JsDataCompletion(Arc<Mutex<JsDataCompletionInner>>);
struct JsDataCompletionInner {
status: Option<u16>,
chunks: VecDeque<Buffer>,
finished: bool,
err: Option<String>,
}
impl JsDataCompletion {
fn inner(&self) -> turso_sync_engine::Result<MutexGuard<JsDataCompletionInner>> {
let inner = self.0.lock().unwrap();
if let Some(err) = &inner.err {
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
err.clone(),
));
}
Ok(inner)
}
}
impl DataCompletion for JsDataCompletion {
type DataPollResult = JsDataPollResult;
fn status(&self) -> turso_sync_engine::Result<Option<u16>> {
let inner = self.inner()?;
Ok(inner.status)
}
fn poll_data(&self) -> turso_sync_engine::Result<Option<Self::DataPollResult>> {
let mut inner = self.inner()?;
let chunk = inner.chunks.pop_front();
Ok(chunk.map(JsDataPollResult))
}
fn is_done(&self) -> turso_sync_engine::Result<bool> {
let inner = self.inner()?;
Ok(inner.finished)
}
}
#[napi]
impl JsDataCompletion {
#[napi]
pub fn poison(&self, err: String) {
let mut completion = self.0.lock().unwrap();
completion.err = Some(err);
}
#[napi]
pub fn status(&self, value: u32) {
let mut completion = self.0.lock().unwrap();
completion.status = Some(value as u16);
}
#[napi]
pub fn push(&self, value: Buffer) {
let mut completion = self.0.lock().unwrap();
completion.chunks.push_back(value);
}
#[napi]
pub fn done(&self) {
let mut completion = self.0.lock().unwrap();
completion.finished = true;
}
}
#[napi]
pub struct JsDataPollResult(Buffer);
impl DataPollResult for JsDataPollResult {
fn data(&self) -> &[u8] {
&self.0
}
}
#[napi]
pub struct JsProtocolRequestData {
request: Arc<Mutex<Option<JsProtocolRequest>>>,
completion: JsDataCompletion,
}
#[napi]
impl JsProtocolRequestData {
#[napi]
pub fn request(&self) -> JsProtocolRequest {
let mut request = self.request.lock().unwrap();
request.take().unwrap()
}
#[napi]
pub fn completion(&self) -> JsDataCompletion {
self.completion.clone()
}
}
impl ProtocolIO for JsProtocolIo {
type DataCompletion = JsDataCompletion;
fn http(
&self,
method: &str,
path: &str,
body: Option<Vec<u8>>,
) -> turso_sync_engine::Result<JsDataCompletion> {
Ok(self.add_request(JsProtocolRequest::Http {
method: method.to_string(),
path: path.to_string(),
body: body.map(Buffer::from),
}))
}
fn full_read(&self, path: &str) -> turso_sync_engine::Result<Self::DataCompletion> {
Ok(self.add_request(JsProtocolRequest::FullRead {
path: path.to_string(),
}))
}
fn full_write(
&self,
path: &str,
content: Vec<u8>,
) -> turso_sync_engine::Result<Self::DataCompletion> {
Ok(self.add_request(JsProtocolRequest::FullWrite {
path: path.to_string(),
content: Buffer::from(content),
}))
}
}
#[napi]
pub struct JsProtocolIo {
requests: Mutex<Vec<JsProtocolRequestData>>,
}
impl Default for JsProtocolIo {
fn default() -> Self {
Self {
requests: Mutex::new(Vec::new()),
}
}
}
#[napi]
impl JsProtocolIo {
#[napi]
pub fn take_request(&self) -> Option<JsProtocolRequestData> {
self.requests.lock().unwrap().pop()
}
fn add_request(&self, request: JsProtocolRequest) -> JsDataCompletion {
let completion = JsDataCompletionInner {
chunks: VecDeque::new(),
finished: false,
err: None,
status: None,
};
let completion = JsDataCompletion(Arc::new(Mutex::new(completion)));
let mut requests = self.requests.lock().unwrap();
requests.push(JsProtocolRequestData {
request: Arc::new(Mutex::new(Some(request))),
completion: completion.clone(),
});
completion
}
}
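
The JavaScript host drains pending requests via takeRequest() and fulfills each one through its JsDataCompletion: set the status (for HTTP), push body chunks, then call done(), or poison() to fail it. A rough sketch for the Http variant, assuming a global fetch; baseUrl and headers are placeholders:

```js
// Sketch of serving one protocol request taken from JsProtocolIo on the JS side.
async function serve(requestData, baseUrl, headers) {
  const req = requestData.request()           // the payload can be taken only once
  const completion = requestData.completion()
  if (req.type !== 'Http') return             // FullRead/FullWrite are omitted in this sketch
  try {
    const res = await fetch(`${baseUrl}${req.path}`, { method: req.method, headers, body: req.body })
    completion.status(res.status)
    completion.push(Buffer.from(await res.arrayBuffer()))  // a single chunk is enough here
    completion.done()
  } catch (err) {
    completion.poison(`fetch error: ${err}`)  // surfaces as a sync-engine error on the Rust side
  }
}
```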


@@ -0,0 +1,166 @@
#![deny(clippy::all)]
pub mod generator;
pub mod js_protocol_io;
use std::sync::{Arc, Mutex};
use napi::bindgen_prelude::AsyncTask;
use napi_derive::napi;
use turso_node::IoLoopTask;
use turso_sync_engine::{
database_sync_engine::{DatabaseSyncEngine, DatabaseSyncEngineOpts},
types::Coro,
};
use crate::{
generator::GeneratorHolder,
js_protocol_io::{JsProtocolIo, JsProtocolRequestData},
};
#[napi(object)]
pub struct DatabaseOpts {
pub path: String,
}
#[napi]
pub struct SyncEngine {
path: String,
client_name: String,
wal_pull_batch_size: u32,
io: Arc<dyn turso_core::IO>,
protocol: Arc<JsProtocolIo>,
sync_engine: Arc<Mutex<Option<DatabaseSyncEngine<JsProtocolIo>>>>,
opened: Arc<Mutex<Option<turso_node::Database>>>,
}
#[napi(object)]
pub struct SyncEngineOpts {
pub path: String,
pub client_name: Option<String>,
pub wal_pull_batch_size: Option<u32>,
}
#[napi]
impl SyncEngine {
#[napi(constructor)]
pub fn new(opts: SyncEngineOpts) -> napi::Result<Self> {
Ok(SyncEngine {
path: opts.path,
client_name: opts.client_name.unwrap_or("turso-sync-js".to_string()),
wal_pull_batch_size: opts.wal_pull_batch_size.unwrap_or(100),
sync_engine: Arc::new(Mutex::new(None)),
io: Arc::new(turso_core::PlatformIO::new().map_err(|e| {
napi::Error::new(
napi::Status::GenericFailure,
format!("Failed to create IO: {e}"),
)
})?),
protocol: Arc::new(JsProtocolIo::default()),
#[allow(clippy::arc_with_non_send_sync)]
opened: Arc::new(Mutex::new(None)),
})
}
#[napi]
pub fn init(&self) -> GeneratorHolder {
let opts = DatabaseSyncEngineOpts {
client_name: self.client_name.clone(),
wal_pull_batch_size: self.wal_pull_batch_size as u64,
};
let protocol = self.protocol.clone();
let sync_engine = self.sync_engine.clone();
let io = self.io.clone();
let opened = self.opened.clone();
let path = self.path.clone();
let generator = genawaiter::sync::Gen::new(|coro| async move {
let initialized = DatabaseSyncEngine::new(&coro, io.clone(), protocol, &path, opts).await?;
let connection = initialized.connect(&coro).await?;
let db = turso_node::Database::create(None, io.clone(), connection, false);
*sync_engine.lock().unwrap() = Some(initialized);
*opened.lock().unwrap() = Some(db);
Ok(())
});
GeneratorHolder {
inner: Box::new(Mutex::new(generator)),
}
}
#[napi]
pub fn io_loop_sync(&self) -> napi::Result<()> {
self
.io
.run_once()
.map_err(|e| napi::Error::new(napi::Status::GenericFailure, format!("IO error: {e}")))?;
Ok(())
}
/// Runs the I/O loop asynchronously, returning a Promise.
#[napi(ts_return_type = "Promise<void>")]
pub fn io_loop_async(&self) -> AsyncTask<IoLoopTask> {
let io = self.io.clone();
AsyncTask::new(IoLoopTask { io })
}
#[napi]
pub fn protocol_io(&self) -> Option<JsProtocolRequestData> {
self.protocol.take_request()
}
#[napi]
pub fn sync(&self) -> GeneratorHolder {
self.run(async move |coro, sync_engine| sync_engine.sync(coro).await)
}
#[napi]
pub fn push(&self) -> GeneratorHolder {
self.run(async move |coro, sync_engine| sync_engine.push(coro).await)
}
#[napi]
pub fn pull(&self) -> GeneratorHolder {
self.run(async move |coro, sync_engine| sync_engine.pull(coro).await)
}
#[napi]
pub fn open(&self) -> napi::Result<turso_node::Database> {
let opened = self.opened.lock().unwrap();
let Some(opened) = opened.as_ref() else {
return Err(napi::Error::new(
napi::Status::GenericFailure,
"sync_engine must be initialized".to_string(),
));
};
Ok(opened.clone())
}
fn run(
&self,
f: impl AsyncFnOnce(&Coro, &mut DatabaseSyncEngine<JsProtocolIo>) -> turso_sync_engine::Result<()>
+ 'static,
) -> GeneratorHolder {
let sync_engine = self.sync_engine.clone();
#[allow(clippy::await_holding_lock)]
let generator = genawaiter::sync::Gen::new(|coro| async move {
let Ok(mut sync_engine) = sync_engine.try_lock() else {
let nasty_error = "sync_engine is busy".to_string();
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
nasty_error,
));
};
let Some(sync_engine) = sync_engine.as_mut() else {
let error = "sync_engine must be initialized".to_string();
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
error,
));
};
f(&coro, sync_engine).await?;
Ok(())
});
GeneratorHolder {
inner: Box::new(Mutex::new(generator)),
}
}
}


@@ -0,0 +1,86 @@
"use strict";
const { SyncEngine } = require("./index.js");
const Database = require("../../bindings/javascript/promise.js")
const fs = require('node:fs');
const GENERATOR_RESUME_IO = 0;
const GENERATOR_RESUME_DONE = 1;
async function process(httpOpts, request) {
const requestType = request.request();
const completion = request.completion();
if (requestType.type == 'Http') {
try {
const response = await fetch(`${httpOpts.url}${requestType.path}`, {
method: requestType.method,
headers: httpOpts.headers,
body: requestType.body
});
completion.status(response.status);
const reader = response.body.getReader();
while (true) {
const { done, value } = await reader.read();
if (done) {
completion.done();
break;
}
completion.push(value);
}
} catch (error) {
completion.poison(`fetch error: ${error}`);
}
} else if (requestType.type == 'FullRead') {
try {
const metadata = await fs.promises.readFile(requestType.path);
completion.push(metadata);
completion.done();
} catch (error) {
if (error.code === 'ENOENT') {
completion.done();
} else {
completion.poison(`metadata read error: ${error}`);
}
}
} else if (requestType.type == 'FullWrite') {
try {
const unix = Math.floor(Date.now() / 1000);
const nonce = Math.floor(Math.random() * 1000000000);
const tmp = `${requestType.path}.tmp.${unix}.${nonce}`;
await fs.promises.writeFile(tmp, requestType.content);
await fs.promises.rename(tmp, requestType.path);
completion.done();
} catch (error) {
completion.poison(`metadata write error: ${error}`);
}
}
}
async function run(httpOpts, engine, generator) {
while (generator.resume(null) !== GENERATOR_RESUME_DONE) {
const tasks = [engine.ioLoopAsync()];
for (let request = engine.protocolIo(); request != null; request = engine.protocolIo()) {
tasks.push(process(httpOpts, request));
}
await Promise.all(tasks);
}
}
export async function connect(opts) {
const engine = new SyncEngine({ path: opts.path, clientName: opts.clientName });
const httpOpts = {
url: opts.url,
headers: {
"Authorization": `Bearer ${opts.authToken}`,
...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
}
};
await run(httpOpts, engine, engine.init());
const nativeDb = engine.open();
const db = Database.create();
db.initialize(nativeDb, opts.path, false);
db.sync = async function () { await run(httpOpts, engine, engine.sync()); }
db.pull = async function () { await run(httpOpts, engine, engine.pull()); }
db.push = async function () { await run(httpOpts, engine, engine.push()); }
return db;
}
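
A hypothetical caller of connect(), assuming the module is loaded as ESM (it uses `export`); the endpoint, token, and local path are placeholders, and the returned handle exposes the promise-based database API plus the sync/pull/push helpers defined above:

```js
// Hypothetical caller of connect(); endpoint, token, and path are placeholders.
import { connect } from './sync.js'

const db = await connect({
  path: 'local.db',
  url: 'https://<your-database>.turso.io',
  authToken: process.env.TURSO_AUTH_TOKEN,
})

await db.pull()   // bring remote changes into the local replica
// ... run queries through the regular promise-based database API ...
await db.push()   // upload local changes to the remote
```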


@@ -0,0 +1,14 @@
{
"compilerOptions": {
"target": "ESNext",
"strict": true,
"moduleResolution": "node",
"module": "CommonJS",
"noUnusedLocals": true,
"noUnusedParameters": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true
},
"include": ["."],
"exclude": ["node_modules", "bench", "__test__"]
}


@@ -0,0 +1,66 @@
import {
createOnMessage as __wasmCreateOnMessageForFsProxy,
getDefaultContext as __emnapiGetDefaultContext,
instantiateNapiModuleSync as __emnapiInstantiateNapiModuleSync,
WASI as __WASI,
} from '@napi-rs/wasm-runtime'
const __wasi = new __WASI({
version: 'preview1',
})
const __wasmUrl = new URL('./turso-sync-js.wasm32-wasi.wasm', import.meta.url).href
const __emnapiContext = __emnapiGetDefaultContext()
const __sharedMemory = new WebAssembly.Memory({
initial: 4000,
maximum: 65536,
shared: true,
})
const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())
const {
instance: __napiInstance,
module: __wasiModule,
napiModule: __napiModule,
} = __emnapiInstantiateNapiModuleSync(__wasmFile, {
context: __emnapiContext,
asyncWorkPoolSize: 4,
wasi: __wasi,
onCreateWorker() {
const worker = new Worker(new URL('./wasi-worker-browser.mjs', import.meta.url), {
type: 'module',
})
return worker
},
overwriteImports(importObject) {
importObject.env = {
...importObject.env,
...importObject.napi,
...importObject.emnapi,
memory: __sharedMemory,
}
return importObject
},
beforeInit({ instance }) {
for (const name of Object.keys(instance.exports)) {
if (name.startsWith('__napi_register__')) {
instance.exports[name]()
}
}
},
})
export default __napiModule.exports
export const Database = __napiModule.exports.Database
export const Statement = __napiModule.exports.Statement
export const GeneratorHolder = __napiModule.exports.GeneratorHolder
export const JsDataCompletion = __napiModule.exports.JsDataCompletion
export const JsDataPollResult = __napiModule.exports.JsDataPollResult
export const JsProtocolIo = __napiModule.exports.JsProtocolIo
export const JsProtocolRequestData = __napiModule.exports.JsProtocolRequestData
export const SyncEngine = __napiModule.exports.SyncEngine


@@ -0,0 +1,118 @@
/* eslint-disable */
/* prettier-ignore */
/* auto-generated by NAPI-RS */
const __nodeFs = require('node:fs')
const __nodePath = require('node:path')
const { WASI: __nodeWASI } = require('node:wasi')
const { Worker } = require('node:worker_threads')
const {
createOnMessage: __wasmCreateOnMessageForFsProxy,
getDefaultContext: __emnapiGetDefaultContext,
instantiateNapiModuleSync: __emnapiInstantiateNapiModuleSync,
} = require('@napi-rs/wasm-runtime')
const __rootDir = __nodePath.parse(process.cwd()).root
const __wasi = new __nodeWASI({
version: 'preview1',
env: process.env,
preopens: {
[__rootDir]: __rootDir,
}
})
const __emnapiContext = __emnapiGetDefaultContext()
const __sharedMemory = new WebAssembly.Memory({
initial: 4000,
maximum: 65536,
shared: true,
})
let __wasmFilePath = __nodePath.join(__dirname, 'turso-sync-js.wasm32-wasi.wasm')
const __wasmDebugFilePath = __nodePath.join(__dirname, 'turso-sync-js.wasm32-wasi.debug.wasm')
if (__nodeFs.existsSync(__wasmDebugFilePath)) {
__wasmFilePath = __wasmDebugFilePath
} else if (!__nodeFs.existsSync(__wasmFilePath)) {
try {
__wasmFilePath = __nodePath.resolve('turso-sync-js-wasm32-wasi')
} catch {
throw new Error('Cannot find turso-sync-js.wasm32-wasi.wasm file, and turso-sync-js-wasm32-wasi package is not installed.')
}
}
const { instance: __napiInstance, module: __wasiModule, napiModule: __napiModule } = __emnapiInstantiateNapiModuleSync(__nodeFs.readFileSync(__wasmFilePath), {
context: __emnapiContext,
asyncWorkPoolSize: (function() {
const threadsSizeFromEnv = Number(process.env.NAPI_RS_ASYNC_WORK_POOL_SIZE ?? process.env.UV_THREADPOOL_SIZE)
// NaN > 0 is false
if (threadsSizeFromEnv > 0) {
return threadsSizeFromEnv
} else {
return 4
}
})(),
reuseWorker: true,
wasi: __wasi,
onCreateWorker() {
const worker = new Worker(__nodePath.join(__dirname, 'wasi-worker.mjs'), {
env: process.env,
})
worker.onmessage = ({ data }) => {
__wasmCreateOnMessageForFsProxy(__nodeFs)(data)
}
// The main thread of Node.js waits for all the active handles before exiting.
// But Rust threads are never waited without `thread::join`.
// So here we hack the code of Node.js to prevent the workers from being referenced (active).
// According to https://github.com/nodejs/node/blob/19e0d472728c79d418b74bddff588bea70a403d0/lib/internal/worker.js#L415,
// a worker consists of two handles: kPublicPort and kHandle.
{
const kPublicPort = Object.getOwnPropertySymbols(worker).find(s =>
s.toString().includes("kPublicPort")
);
if (kPublicPort) {
worker[kPublicPort].ref = () => {};
}
const kHandle = Object.getOwnPropertySymbols(worker).find(s =>
s.toString().includes("kHandle")
);
if (kHandle) {
worker[kHandle].ref = () => {};
}
worker.unref();
}
return worker
},
overwriteImports(importObject) {
importObject.env = {
...importObject.env,
...importObject.napi,
...importObject.emnapi,
memory: __sharedMemory,
}
return importObject
},
beforeInit({ instance }) {
for (const name of Object.keys(instance.exports)) {
if (name.startsWith('__napi_register__')) {
instance.exports[name]()
}
}
},
})
module.exports = __napiModule.exports
module.exports.Database = __napiModule.exports.Database
module.exports.Statement = __napiModule.exports.Statement
module.exports.GeneratorHolder = __napiModule.exports.GeneratorHolder
module.exports.JsDataCompletion = __napiModule.exports.JsDataCompletion
module.exports.JsDataPollResult = __napiModule.exports.JsDataPollResult
module.exports.JsProtocolIo = __napiModule.exports.JsProtocolIo
module.exports.JsProtocolRequestData = __napiModule.exports.JsProtocolRequestData
module.exports.SyncEngine = __napiModule.exports.SyncEngine


@@ -0,0 +1,32 @@
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'
const handler = new MessageHandler({
onLoad({ wasmModule, wasmMemory }) {
const wasi = new WASI({
print: function () {
// eslint-disable-next-line no-console
console.log.apply(console, arguments)
},
printErr: function() {
// eslint-disable-next-line no-console
console.error.apply(console, arguments)
},
})
return instantiateNapiModuleSync(wasmModule, {
childThread: true,
wasi,
overwriteImports(importObject) {
importObject.env = {
...importObject.env,
...importObject.napi,
...importObject.emnapi,
memory: wasmMemory,
}
},
})
},
})
globalThis.onmessage = function (e) {
handler.handle(e)
}


@@ -0,0 +1,63 @@
import fs from "node:fs";
import { createRequire } from "node:module";
import { parse } from "node:path";
import { WASI } from "node:wasi";
import { parentPort, Worker } from "node:worker_threads";
const require = createRequire(import.meta.url);
const { instantiateNapiModuleSync, MessageHandler, getDefaultContext } = require("@napi-rs/wasm-runtime");
if (parentPort) {
parentPort.on("message", (data) => {
globalThis.onmessage({ data });
});
}
Object.assign(globalThis, {
self: globalThis,
require,
Worker,
importScripts: function (f) {
;(0, eval)(fs.readFileSync(f, "utf8") + "//# sourceURL=" + f);
},
postMessage: function (msg) {
if (parentPort) {
parentPort.postMessage(msg);
}
},
});
const emnapiContext = getDefaultContext();
const __rootDir = parse(process.cwd()).root;
const handler = new MessageHandler({
onLoad({ wasmModule, wasmMemory }) {
const wasi = new WASI({
version: 'preview1',
env: process.env,
preopens: {
[__rootDir]: __rootDir,
},
});
return instantiateNapiModuleSync(wasmModule, {
childThread: true,
wasi,
context: emnapiContext,
overwriteImports(importObject) {
importObject.env = {
...importObject.env,
...importObject.napi,
...importObject.emnapi,
memory: wasmMemory
};
},
});
},
});
globalThis.onmessage = function (e) {
handler.handle(e);
};

File diff suppressed because it is too large