Mirror of https://github.com/aljazceru/turso.git (synced 2026-01-10 19:54:24 +01:00)

Commit: opfs for sync in one commit!
bindings/javascript/examples/browser/index.html (new file, 272 lines)
@@ -0,0 +1,272 @@
<!doctype html>
<html lang="en">

<head>
  <meta charset="utf-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1" />
  <title>Brutal DB Viewer</title>
  <style>
    :root {
      --fg: #000;
      --bg: #fff;
    }

    * {
      box-sizing: border-box;
    }

    html,
    body {
      margin: 0 10%;
      padding: 0;
      background: var(--bg);
      color: var(--fg);
      font: 14px/1.4 ui-monospace, SFMono-Regular, Menlo, Consolas, "Liberation Mono", monospace;
    }

    header {
      border-bottom: 2px solid #000;
      padding: 12px 16px;
      font-weight: 700;
      letter-spacing: .03em;
      text-transform: uppercase;
    }

    main {
      padding: 16px;
      display: grid;
      gap: 12px;
    }

    label {
      display: block;
      margin-bottom: 6px;
    }

    textarea {
      width: 100%;
      min-height: 128px;
      max-height: 60vh;
      resize: vertical;
      border: 1px solid #000;
      padding: 8px;
      background: #fff;
      color: #000;
    }

    .controls {
      display: flex;
      align-items: center;
      gap: 8px;
      margin-top: 8px;
    }

    button {
      appearance: none;
      background: #fff;
      color: #000;
      border: 1px solid #000;
      padding: 6px 10px;
      cursor: pointer;
      font: inherit;
    }

    button:hover {
      transform: translate(-1px, -1px);
      box-shadow: 2px 2px 0 #000;
    }

    button:active {
      transform: translate(0, 0);
      box-shadow: none;
    }

    .status {
      margin-left: auto;
      opacity: .9;
    }

    #result {
      border-top: 2px solid #000;
      padding-top: 12px;
    }

    .meta {
      margin-bottom: 8px;
    }

    .error {
      border: 1px solid #000;
      padding: 8px;
      margin-bottom: 8px;
      white-space: pre-wrap;
    }

    .table-wrap {
      overflow: auto;
      border: 1px solid #000;
      max-height: 65vh;
    }

    table {
      width: 100%;
      border-collapse: collapse;
    }

    thead th {
      position: sticky;
      top: 0;
      background: #fff;
    }

    th,
    td {
      border: 1px solid #000;
      padding: 6px 8px;
      vertical-align: top;
      white-space: pre;
    }

    .sr-only {
      position: absolute;
      width: 1px;
      height: 1px;
      padding: 0;
      margin: -1px;
      overflow: hidden;
      clip: rect(0, 0, 0, 0);
      border: 0;
    }
  </style>
</head>

<body>
  <header>DB Viewer</header>
  <main>
    <section>
      <label for="sql">Query</label>
      <textarea id="sql" spellcheck="false" placeholder="SELECT * FROM people;">SELECT 'hello, world';</textarea>
      <div class="controls">
        <button id="run" type="button" title="Run (Ctrl/⌘ + Enter)">Run</button>
        <div class="status" id="status">Ready</div>
      </div>
      <div class="sr-only" aria-live="polite" id="live"></div>
    </section>

    <section id="result">
      <div class="meta" id="meta">No results yet.</div>
      <div id="error" class="error" hidden></div>
      <div class="table-wrap">
        <table id="table" role="table" aria-label="Query results">
          <thead></thead>
          <tbody></tbody>
        </table>
      </div>
    </section>
  </main>

  <script type="module">
    import { connect } from "@tursodatabase/database-browser";
    const db = await connect('data.db');
    // --- Wire your DB here --------------------------------------------------
    // Provide window.executeQuery = async (sql) => ({ columns: string[], rows: any[][] })
    // If not provided, a tiny mock dataset is used for demo purposes.
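    // Editor's sketch (not part of this commit) of such a hook, assuming the
    // prepared-statement API from the commented-out block further below, where
    // stmt.columns() yields { name } objects and stmt.all() yields row objects:
    //
    //   window.executeQuery = async (sql) => {
    //     const stmt = await db.prepare(sql);
    //     const columns = (await stmt.columns()).map((c) => c.name);
    //     const rows = (await stmt.all()).map((r) => columns.map((c) => r[c]));
    //     return { columns, rows };
    //   };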

    (function () {
      const $ = (sel) => document.querySelector(sel);
      const sqlEl = $('#sql');
      const runBtn = $('#run');
      const statusEl = $('#status');
      const liveEl = $('#live');
      const metaEl = $('#meta');
      const errEl = $('#error');
      const thead = $('#table thead');
      const tbody = $('#table tbody');

      function fmt(v) {
        if (v === null || v === undefined) return 'NULL';
        if (typeof v === 'object') {
          try { return JSON.stringify(v); } catch { return String(v); }
        }
        return String(v);
      }

      function clearTable() { thead.innerHTML = ''; tbody.innerHTML = ''; }

      function renderTable(result) {
        clearTable();
        const { columns = [], rows = [] } = result || {};

        // Header
        const trh = document.createElement('tr');
        for (const name of columns) {
          const th = document.createElement('th');
          th.textContent = String(name);
          trh.appendChild(th);
        }
        thead.appendChild(trh);

        // Body
        const frag = document.createDocumentFragment();
        for (const r of rows) {
          const tr = document.createElement('tr');
          for (let i = 0; i < columns.length; i++) {
            const td = document.createElement('td');
            td.textContent = fmt(r[i] ?? null);
            tr.appendChild(td);
          }
          frag.appendChild(tr);
        }
        tbody.appendChild(frag);

        metaEl.textContent = rows.length
          ? `${rows.length} row${rows.length === 1 ? '' : 's'} × ${columns.length} column${columns.length === 1 ? '' : 's'}`
          : 'No rows.';
      }

      async function run(sql) {
        // errEl.hidden = true; errEl.textContent = '';
        // statusEl.textContent = 'Running…';
        let t0 = performance.now();
        try {
          for (let i = 0; i < 1; i++) {
            await db.pingSync();
          }
          const res = {};
          // const stmt = await scheduler.postTask(async () => await db.prepare(sql), { priority: 'user-blocking' });
          // const columns = await scheduler.postTask(async () => (await stmt.columns()).map(x => x.name), { priority: 'user-blocking' });
          // const rows = await scheduler.postTask(async () => await stmt.all(), { priority: 'user-blocking' });
          // const res = {
          //   columns: columns,
          //   rows: rows.map(r => columns.map(c => r[c]))
          // };
          const t1 = performance.now();
          renderTable(res);
          const took = Math.max(0, t1 - t0);
          statusEl.textContent = `OK (${took}ms)`;
          liveEl.textContent = `Query finished in ${took} milliseconds.`;
        } catch (e) {
          clearTable();
          statusEl.textContent = 'ERROR';
          const msg = (e && (e.message || e.toString())) || 'Unknown error';
          errEl.textContent = 'ERROR: ' + msg;
          errEl.hidden = false;
          liveEl.textContent = 'Query failed.';
        }
      }

      runBtn.addEventListener('click', () => run(sqlEl.value));
      sqlEl.addEventListener('keydown', (e) => {
        if ((e.ctrlKey || e.metaKey) && e.key === 'Enter') {
          e.preventDefault();
          run(sqlEl.value);
        }
      });

      // Initial demo run
      run(sqlEl.value);
    })();
  </script>
</body>

</html>
bindings/javascript/examples/browser/package.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
  "name": "wasm",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "dev": "vite",
    "build": "vite build",
    "preview": "vite preview"
  },
  "author": "",
  "license": "ISC",
  "description": "",
  "devDependencies": {
    "vite": "^7.1.4"
  },
  "dependencies": {
    "@tursodatabase/database-browser": "../../browser"
  }
}
bindings/javascript/examples/browser/vite.config.js (new file, 22 lines)
@@ -0,0 +1,22 @@
import { defineConfig, searchForWorkspaceRoot } from 'vite'

export default defineConfig({
  server: {
    fs: {
      allow: ['.', '../../']
    },
    define:
    {
      'process.env.NODE_DEBUG_NATIVE': 'false', // string replace at build-time
    },
    headers: {
      'Cross-Origin-Opener-Policy': 'same-origin',
      'Cross-Origin-Embedder-Policy': 'require-corp',
    }
  },
  optimizeDeps: {
    esbuildOptions: {
      define: { 'process.env.NODE_DEBUG_NATIVE': 'false' },
    },
  },
})
bindings/javascript/examples/wasm/index.html (new file, 34 lines)
@@ -0,0 +1,34 @@
<html>
<head>
    <meta content="text/html;charset=utf-8" http-equiv="Content-Type"/>
</head>
<body>
    <button id="run">Run</button>
    <script type="module">
        import { Database, opfsSetup } from "@tursodatabase/database";
        var opfs = await opfsSetup("local.db");
        console.info(opfs);
        async function consume() {
            console.info('take', opfs.take());
            setTimeout(consume, 1000);
        }
        consume();
        async function tick() {
            console.info('tick');
            setTimeout(tick, 1000);
        }
        tick();

        async function run() {
            const db = new Database(opfs);
            console.info('inited');
            await new Promise(resolve => setTimeout(resolve, 5000));
            await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
            console.info('created');
            await db.exec("INSERT INTO t VALUES (1)");
            console.info('inserted');
        }
        document.getElementById("run").onclick = run;
    </script>
</body>
</html>
bindings/javascript/examples/wasm/package.json (new file, 19 lines)
@@ -0,0 +1,19 @@
{
  "name": "wasm",
  "version": "1.0.0",
  "main": "index.js",
  "scripts": {
    "dev": "vite",
    "build": "vite build",
    "preview": "vite preview"
  },
  "author": "",
  "license": "ISC",
  "description": "",
  "devDependencies": {
    "vite": "^7.1.4"
  },
  "dependencies": {
    "@tursodatabase/database": "../.."
  }
}
bindings/javascript/examples/wasm/vite.config.js (new file, 26 lines)
@@ -0,0 +1,26 @@
import { defineConfig, searchForWorkspaceRoot } from 'vite'

export default defineConfig({
  build: {
    minify: false, // Set this to false to disable minification
  },
  resolve: {
    alias: {
      '@tursodatabase/database-wasm32-wasi': '../../turso.wasi-browser.js'
    },
  },
  server: {
    fs: {
      allow: ['.']
    },
    headers: {
      'Cross-Origin-Opener-Policy': 'same-origin',
      'Cross-Origin-Embedder-Policy': 'require-corp',
    }
  },
  optimizeDeps: {
    exclude: [
      "@tursodatabase/database-wasm32-wasi",
    ]
  },
})
bindings/javascript/package-lock.json (generated, 19 lines)
@@ -9,7 +9,11 @@
    "workspaces": [
      "packages/common",
      "packages/native",
      "packages/browser"
      "packages/browser",
      "packages/browser-common",
      "packages/sync/common",
      "packages/sync/native",
      "packages/sync/browser"
    ]
  },
  "node_modules/@babel/code-frame": {
@@ -1103,6 +1107,10 @@
      "resolved": "packages/browser",
      "link": true
    },
    "node_modules/@tursodatabase/database-browser-common": {
      "resolved": "packages/browser-common",
      "link": true
    },
    "node_modules/@tursodatabase/database-common": {
      "resolved": "packages/common",
      "link": true
@@ -2489,6 +2497,7 @@
      "license": "MIT",
      "dependencies": {
        "@napi-rs/wasm-runtime": "^1.0.3",
        "@tursodatabase/database-browser-common": "^0.1.5",
        "@tursodatabase/database-common": "^0.1.5"
      },
      "devDependencies": {
@@ -2499,6 +2508,14 @@
        "vitest": "^3.2.4"
      }
    },
    "packages/browser-common": {
      "name": "@tursodatabase/database-browser-common",
      "version": "0.1.5",
      "license": "MIT",
      "devDependencies": {
        "typescript": "^5.9.2"
      }
    },
    "packages/common": {
      "name": "@tursodatabase/database-common",
      "version": "0.1.5",

@@ -7,7 +7,11 @@
  "workspaces": [
    "packages/common",
    "packages/native",
    "packages/browser"
    "packages/browser",
    "packages/browser-common",
    "packages/sync/common",
    "packages/sync/native",
    "packages/sync/browser"
  ],
  "version": "0.1.5"
}
bindings/javascript/packages/browser-common/README.md (new file, 8 lines)
@@ -0,0 +1,8 @@
## About

This package contains the common JS code of the Turso embedded database that is shared between the final builds for Node and the browser.

Do not use this package directly - use `@tursodatabase/database` or `@tursodatabase/database-browser` instead.

> **⚠️ Warning:** This software is ALPHA, only use it for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.
bindings/javascript/packages/browser-common/index.ts (new file, 239 lines)
@@ -0,0 +1,239 @@
function getUint8ArrayFromMemory(memory: WebAssembly.Memory, ptr: number, len: number): Uint8Array {
    ptr = ptr >>> 0;
    return new Uint8Array(memory.buffer).subarray(ptr, ptr + len);
}

function getStringFromMemory(memory: WebAssembly.Memory, ptr: number, len: number): string {
    const shared = getUint8ArrayFromMemory(memory, ptr, len);
    const copy = new Uint8Array(shared.length);
    copy.set(shared);
    const decoder = new TextDecoder('utf-8');
    return decoder.decode(copy);
}
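
// Editor's note: BrowserImports below is the host import surface the WASM module
// expects. By the conventions in this file, negative return values signal errors
// (-404: file not registered, -1: generic failure) and non-negative values carry
// the result (a file handle, a byte count, or a file size).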

interface BrowserImports {
    is_web_worker(): boolean;
    lookup_file(ptr: number, len: number): number;
    read(handle: number, ptr: number, len: number, offset: number): number;
    write(handle: number, ptr: number, len: number, offset: number): number;
    sync(handle: number): number;
    truncate(handle: number, len: number): number;
    size(handle: number): number;
}

function panic(name): never {
    throw new Error(`method ${name} must be invoked only from the main thread`);
}

const MainDummyImports: BrowserImports = {
    is_web_worker: function (): boolean {
        return false;
    },
    lookup_file: function (ptr: number, len: number): number {
        panic("lookup_file")
    },
    read: function (handle: number, ptr: number, len: number, offset: number): number {
        panic("read")
    },
    write: function (handle: number, ptr: number, len: number, offset: number): number {
        panic("write")
    },
    sync: function (handle: number): number {
        panic("sync")
    },
    truncate: function (handle: number, len: number): number {
        panic("truncate")
    },
    size: function (handle: number): number {
        panic("size")
    }
};

function workerImports(opfs: OpfsDirectory, memory: WebAssembly.Memory): BrowserImports {
    return {
        is_web_worker: function (): boolean {
            return true;
        },
        lookup_file: function (ptr: number, len: number): number {
            try {
                const handle = opfs.lookupFileHandle(getStringFromMemory(memory, ptr, len));
                return handle == null ? -404 : handle;
            } catch (e) {
                return -1;
            }
        },
        read: function (handle: number, ptr: number, len: number, offset: number): number {
            try {
                return opfs.read(handle, getUint8ArrayFromMemory(memory, ptr, len), offset);
            } catch (e) {
                return -1;
            }
        },
        write: function (handle: number, ptr: number, len: number, offset: number): number {
            try {
                return opfs.write(handle, getUint8ArrayFromMemory(memory, ptr, len), offset)
            } catch (e) {
                return -1;
            }
        },
        sync: function (handle: number): number {
            try {
                opfs.sync(handle);
                return 0;
            } catch (e) {
                return -1;
            }
        },
        truncate: function (handle: number, len: number): number {
            try {
                opfs.truncate(handle, len);
                return 0;
            } catch (e) {
                return -1;
            }
        },
        size: function (handle: number): number {
            try {
                return opfs.size(handle);
            } catch (e) {
                return -1;
            }
        }
    }
}

class OpfsDirectory {
    fileByPath: Map<String, { handle: number, sync: FileSystemSyncAccessHandle }>;
    fileByHandle: Map<number, FileSystemSyncAccessHandle>;
    fileHandleNo: number;

    constructor() {
        this.fileByPath = new Map();
        this.fileByHandle = new Map();
        this.fileHandleNo = 0;
    }

    async registerFile(path: string) {
        if (this.fileByPath.has(path)) {
            return;
        }
        const opfsRoot = await navigator.storage.getDirectory();
        const opfsHandle = await opfsRoot.getFileHandle(path, { create: true });
        const opfsSync = await opfsHandle.createSyncAccessHandle();
        this.fileHandleNo += 1;
        this.fileByPath.set(path, { handle: this.fileHandleNo, sync: opfsSync });
        this.fileByHandle.set(this.fileHandleNo, opfsSync);
    }

    async unregisterFile(path: string) {
        const file = this.fileByPath.get(path);
        if (file == null) {
            return;
        }
        this.fileByPath.delete(path);
        this.fileByHandle.delete(file.handle);
        file.sync.close();
    }
    lookupFileHandle(path: string): number | null {
        try {
            const file = this.fileByPath.get(path);
            if (file == null) {
                return null;
            }
            return file.handle;
        } catch (e) {
            console.error('lookupFile', path, e);
            throw e;
        }
    }
    read(handle: number, buffer: Uint8Array, offset: number): number {
        try {
            const file = this.fileByHandle.get(handle);
            const result = file.read(buffer, { at: Number(offset) });
            return result;
        } catch (e) {
            console.error('read', handle, buffer.length, offset, e);
            throw e;
        }
    }
    write(handle: number, buffer: Uint8Array, offset: number): number {
        try {
            const file = this.fileByHandle.get(handle);
            const result = file.write(buffer, { at: Number(offset) });
            return result;
        } catch (e) {
            console.error('write', handle, buffer.length, offset, e);
            throw e;
        }
    }
    sync(handle: number) {
        try {
            const file = this.fileByHandle.get(handle);
            file.flush();
        } catch (e) {
            console.error('sync', handle, e);
            throw e;
        }
    }
    truncate(handle: number, size: number) {
        try {
            const file = this.fileByHandle.get(handle);
            const result = file.truncate(size);
            return result;
        } catch (e) {
            console.error('truncate', handle, size, e);
            throw e;
        }
    }
    size(handle: number): number {
        try {
            const file = this.fileByHandle.get(handle);
            const size = file.getSize()
            return size;
        } catch (e) {
            console.error('size', handle, e);
            throw e;
        }
    }
}

var workerRequestId = 0;
function waitForWorkerResponse(worker: Worker, id: number): Promise<any> {
    let waitResolve, waitReject;
    const callback = msg => {
        if (msg.data.id == id) {
            if (msg.data.error != null) {
                waitReject(msg.data.error)
            } else {
                waitResolve()
            }
            cleanup();
        }
    };
    const cleanup = () => worker.removeEventListener("message", callback);

    worker.addEventListener("message", callback);
    const result = new Promise((resolve, reject) => {
        waitResolve = resolve;
        waitReject = reject;
    });
    return result;
}

function registerFileAtWorker(worker: Worker, path: string): Promise<void> {
    workerRequestId += 1;
    const currentId = workerRequestId;
    const promise = waitForWorkerResponse(worker, currentId);
    worker.postMessage({ __turso__: "register", path: path, id: currentId });
    return promise;
}

function unregisterFileAtWorker(worker: Worker, path: string): Promise<void> {
    workerRequestId += 1;
    const currentId = workerRequestId;
    const promise = waitForWorkerResponse(worker, currentId);
    worker.postMessage({ __turso__: "unregister", path: path, id: currentId });
    return promise;
}

export { OpfsDirectory, workerImports, MainDummyImports, waitForWorkerResponse, registerFileAtWorker, unregisterFileAtWorker }
bindings/javascript/packages/browser-common/package.json (new file, 25 lines)
@@ -0,0 +1,25 @@
{
  "name": "@tursodatabase/database-browser-common",
  "version": "0.1.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "packageManager": "yarn@4.9.2",
  "files": [
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "typescript": "^5.9.2"
  },
  "scripts": {
    "tsc-build": "npm exec tsc",
    "build": "npm run tsc-build",
    "test": "echo 'no tests'"
  }
}
bindings/javascript/packages/browser-common/tsconfig.json (new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "esnext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020",
      "DOM",
      "WebWorker"
    ],
  },
  "include": [
    "*"
  ]
}
@@ -5,6 +5,7 @@ import {
  WASI as __WASI,
} from '@napi-rs/wasm-runtime'

import { MainDummyImports } from "@tursodatabase/database-browser-common";


const __wasi = new __WASI({
@@ -25,10 +26,6 @@ const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())

export let MainWorker = null;

function panic(name) {
  throw new Error(`method ${name} must be invoked only from the main thread`);
}

const {
  instance: __napiInstance,
  module: __wasiModule,
@@ -49,14 +46,8 @@ const {
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
      ...MainDummyImports,
      memory: __sharedMemory,
      is_web_worker: () => false,
      lookup_file: () => panic("lookup_file"),
      read: () => panic("read"),
      write: () => panic("write"),
      sync: () => panic("sync"),
      truncate: () => panic("truncate"),
      size: () => panic("size"),
    }
    return importObject
  },

@@ -40,6 +40,7 @@
  },
  "dependencies": {
    "@napi-rs/wasm-runtime": "^1.0.3",
    "@tursodatabase/database-browser-common": "^0.1.5",
    "@tursodatabase/database-common": "^0.1.5"
  }
}
@@ -1,50 +1,24 @@
import { DatabasePromise, NativeDatabase, DatabaseOpts, SqliteError } from "@tursodatabase/database-common"
import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common"
import { DatabasePromise, NativeDatabase, DatabaseOpts, SqliteError, } from "@tursodatabase/database-common"
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";

let workerRequestId = 0;
class Database extends DatabasePromise {
    files: string[];
    constructor(db: NativeDatabase, files: string[], opts: DatabaseOpts = {}) {
    path: string | null;
    constructor(db: NativeDatabase, fsPath: string | null, opts: DatabaseOpts = {}) {
        super(db, opts)
        this.files = files;
        this.path = fsPath;
    }
    async close() {
        let currentId = workerRequestId;
        workerRequestId += this.files.length;

        let tasks = [];
        for (const file of this.files) {
            (MainWorker as any).postMessage({ __turso__: "unregister", path: file, id: currentId });
            tasks.push(waitFor(currentId));
            currentId += 1;
        if (this.path != null) {
            await Promise.all([
                unregisterFileAtWorker(MainWorker, this.path),
                unregisterFileAtWorker(MainWorker, `${this.path}-wal`)
            ]);
        }
        await Promise.all(tasks);
        this.db.close();
    }
}

function waitFor(id: number): Promise<any> {
    let waitResolve, waitReject;
    const callback = msg => {
        if (msg.data.id == id) {
            if (msg.data.error != null) {
                waitReject(msg.data.error)
            } else {
                waitResolve()
            }
            cleanup();
        }
    };
    const cleanup = () => (MainWorker as any).removeEventListener("message", callback);

    (MainWorker as any).addEventListener("message", callback);
    const result = new Promise((resolve, reject) => {
        waitResolve = resolve;
        waitReject = reject;
    });
    return result;
}

/**
 * Creates a new database connection asynchronously.
 *
@@ -55,24 +29,18 @@ function waitFor(id: number): Promise<any> {
async function connect(path: string, opts: DatabaseOpts = {}): Promise<Database> {
    if (path == ":memory:") {
        const db = await nativeConnect(path, { tracing: opts.tracing });
        return new Database(db, [], opts);
        return new Database(db, null, opts);
    }
    await initThreadPool();
    if (MainWorker == null) {
        throw new Error("panic: MainWorker is not set");
    }

    let currentId = workerRequestId;
    workerRequestId += 2;

    let dbHandlePromise = waitFor(currentId);
    let walHandlePromise = waitFor(currentId + 1);
    (MainWorker as any).postMessage({ __turso__: "register", path: `${path}`, id: currentId });
    (MainWorker as any).postMessage({ __turso__: "register", path: `${path}-wal`, id: currentId + 1 });
    await Promise.all([dbHandlePromise, walHandlePromise]);
    await Promise.all([
        registerFileAtWorker(MainWorker, path),
        registerFileAtWorker(MainWorker, `${path}-wal`)
    ]);
    const db = await nativeConnect(path, { tracing: opts.tracing });
    const files = [path, `${path}-wal`];
    return new Database(db, files, opts);
    return new Database(db, path, opts);
}

export { connect, Database, SqliteError }
@@ -5,6 +5,7 @@
    "declarationMap": true,
    "module": "nodenext",
    "target": "esnext",
    "moduleResolution": "nodenext",
    "outDir": "dist/",
    "lib": [
      "es2020"
@@ -1,108 +1,9 @@
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'
import { OpfsDirectory, workerImports } from '@tursodatabase/database-browser-common';

var fileByPath = new Map();
var fileByHandle = new Map();
let fileHandles = 0;
var opfs = new OpfsDirectory();
var memory = null;

function getUint8ArrayFromWasm(ptr, len) {
  ptr = ptr >>> 0;
  return new Uint8Array(memory.buffer).subarray(ptr, ptr + len);
}


async function registerFile(path) {
  if (fileByPath.has(path)) {
    return;
  }
  const opfsRoot = await navigator.storage.getDirectory();
  const opfsHandle = await opfsRoot.getFileHandle(path, { create: true });
  const opfsSync = await opfsHandle.createSyncAccessHandle();
  fileHandles += 1;
  fileByPath.set(path, { handle: fileHandles, sync: opfsSync });
  fileByHandle.set(fileHandles, opfsSync);
}

async function unregisterFile(path) {
  const file = fileByPath.get(path);
  if (file == null) {
    return;
  }
  fileByPath.delete(path);
  fileByHandle.delete(file.handle);
  file.sync.close();
}

function lookup_file(pathPtr, pathLen) {
  try {
    const buffer = getUint8ArrayFromWasm(pathPtr, pathLen);
    const notShared = new Uint8Array(buffer.length);
    notShared.set(buffer);
    const decoder = new TextDecoder('utf-8');
    const path = decoder.decode(notShared);
    const file = fileByPath.get(path);
    if (file == null) {
      return -404;
    }
    return file.handle;
  } catch (e) {
    console.error('lookupFile', pathPtr, pathLen, e);
    return -1;
  }
}
function read(handle, bufferPtr, bufferLen, offset) {
  try {
    const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
    const file = fileByHandle.get(Number(handle));
    const result = file.read(buffer, { at: Number(offset) });
    return result;
  } catch (e) {
    console.error('read', handle, bufferPtr, bufferLen, offset, e);
    return -1;
  }
}
function write(handle, bufferPtr, bufferLen, offset) {
  try {
    const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
    const file = fileByHandle.get(Number(handle));
    const result = file.write(buffer, { at: Number(offset) });
    return result;
  } catch (e) {
    console.error('write', handle, bufferPtr, bufferLen, offset, e);
    return -1;
  }
}
function sync(handle) {
  try {
    const file = fileByHandle.get(Number(handle));
    file.flush();
    return 0;
  } catch (e) {
    console.error('sync', handle, e);
    return -1;
  }
}
function truncate(handle, size) {
  try {
    const file = fileByHandle.get(Number(handle));
    const result = file.truncate(size);
    return result;
  } catch (e) {
    console.error('truncate', handle, size, e);
    return -1;
  }
}
function size(handle) {
  try {
    const file = fileByHandle.get(Number(handle));
    const size = file.getSize()
    return size;
  } catch (e) {
    console.error('size', handle, e);
    return -1;
  }
}

const handler = new MessageHandler({
  onLoad({ wasmModule, wasmMemory }) {
    memory = wasmMemory;
@@ -124,14 +25,8 @@ const handler = new MessageHandler({
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
      ...workerImports(opfs, memory),
      memory: wasmMemory,
      is_web_worker: () => true,
      lookup_file: lookup_file,
      read: read,
      write: write,
      sync: sync,
      truncate: truncate,
      size: size,
    }
  },
})
@@ -141,16 +36,16 @@ const handler = new MessageHandler({
globalThis.onmessage = async function (e) {
  if (e.data.__turso__ == 'register') {
    try {
      await registerFile(e.data.path)
      self.postMessage({ id: e.data.id })
      await opfs.registerFile(e.data.path);
      self.postMessage({ id: e.data.id });
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
    return;
  } else if (e.data.__turso__ == 'unregister') {
    try {
      await unregisterFile(e.data.path)
      self.postMessage({ id: e.data.id })
      await opfs.unregisterFile(e.data.path);
      self.postMessage({ id: e.data.id });
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
@@ -18,7 +18,6 @@ export interface NativeDatabase {

  prepare(sql: string): NativeStatement;

  pluck(pluckMode: boolean);
  defaultSafeIntegers(toggle: boolean);
  totalChanges(): number;
  changes(): number;
@@ -32,6 +31,11 @@ export const STEP_ROW = 1;
export const STEP_DONE = 2;
export const STEP_IO = 3;

export interface TableColumn {
  name: string,
  type: string
}

export interface NativeStatement {
  stepAsync(): Promise<number>;
  stepSync(): number;
@@ -39,7 +43,7 @@ export interface NativeStatement {
  pluck(pluckMode: boolean);
  safeIntegers(toggle: boolean);
  raw(toggle: boolean);
  columns(): string[];
  columns(): TableColumn[];
  row(): any;
  reset();
  finalize();
bindings/javascript/packages/native/index.d.ts (vendored, 16 lines)
@@ -91,6 +91,14 @@ export declare class Database {
  ioLoopAsync(): Promise<void>
}

export declare class Opfs {
  constructor()
}

export declare class OpfsFile {

}

/** A prepared statement. */
export declare class Statement {
  reset(): void
@@ -144,6 +152,14 @@ export declare class Statement {
  finalize(): void
}

export declare function connect(path: string, opts?: DatabaseOpts | undefined | null): Promise<unknown>

export interface DatabaseOpts {
  tracing?: string
}

/**
 * turso-db in the browser requires explicit thread pool initialization,
 * so we just put a no-op task on the thread pool and force emnapi to allocate a web worker.
 */
export declare function initThreadPool(): Promise<unknown>

@@ -508,6 +508,10 @@ if (!nativeBinding) {
  throw new Error(`Failed to load native binding`)
}

const { Database, Statement } = nativeBinding
const { Database, Opfs, OpfsFile, Statement, connect, initThreadPool } = nativeBinding
export { Database }
export { Opfs }
export { OpfsFile }
export { Statement }
export { connect }
export { initThreadPool }
bindings/javascript/replace.sh (new file, 11 lines)
@@ -0,0 +1,11 @@
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/common/package.json
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/native/package.json
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/browser/package.json

sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/common/package.json
sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/native/package.json
sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/browser/package.json

sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/native/promise.ts
sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/native/compat.ts
sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/browser/promise.ts
@@ -28,7 +28,6 @@ pub fn init_thread_pool() -> napi::Result<AsyncTask<NoopTask>> {

pub struct ConnectTask {
    path: String,
    is_memory: bool,
    io: Arc<dyn turso_core::IO>,
}

@@ -70,7 +69,7 @@ impl Task for ConnectTask {
            Some(result.db),
            self.io.clone(),
            result.conn,
            self.is_memory,
            self.path.clone(),
        ))
    }
}
@@ -88,16 +87,11 @@ pub fn connect(path: String, opts: Option<DatabaseOpts>) -> Result<AsyncTask<Con
    let task = if is_memory(&path) {
        ConnectTask {
            io: Arc::new(turso_core::MemoryIO::new()),
            is_memory: true,
            path,
        }
    } else {
        let io = Arc::new(Opfs::new()?);
        ConnectTask {
            io,
            is_memory: false,
            path,
        }
        ConnectTask { io, path }
    };
    Ok(AsyncTask::new(task))
}

@@ -45,7 +45,7 @@ pub struct Database {
    _db: Option<Arc<turso_core::Database>>,
    io: Arc<dyn turso_core::IO>,
    conn: Option<Arc<turso_core::Connection>>,
    is_memory: bool,
    path: String,
    is_open: Cell<bool>,
    default_safe_integers: Cell<bool>,
}
@@ -186,20 +186,20 @@ impl Database {
            .connect()
            .map_err(|e| Error::new(Status::GenericFailure, format!("Failed to connect: {e}")))?;

        Ok(Self::create(Some(db), io, conn, is_memory(&path)))
        Ok(Self::create(Some(db), io, conn, path))
    }

    pub fn create(
        db: Option<Arc<turso_core::Database>>,
        io: Arc<dyn turso_core::IO>,
        conn: Arc<turso_core::Connection>,
        is_memory: bool,
        path: String,
    ) -> Self {
        Database {
            _db: db,
            io,
            conn: Some(conn),
            is_memory,
            path,
            is_open: Cell::new(true),
            default_safe_integers: Cell::new(false),
        }
@@ -218,7 +218,13 @@ impl Database {
    /// Returns whether the database is in memory-only mode.
    #[napi(getter)]
    pub fn memory(&self) -> bool {
        self.is_memory
        is_memory(&self.path)
    }

    /// Returns the database file path.
    #[napi(getter)]
    pub fn path(&self) -> String {
        self.path.clone()
    }

    /// Returns whether the database connection is open.
@@ -246,7 +252,7 @@ impl Database {
    /// * `sql` - The SQL statements to execute.
    ///
    /// # Returns
    #[napi]
    #[napi(ts_return_type = "Promise<void>")]
    pub fn batch_async(&self, sql: String) -> Result<AsyncTask<DbTask>> {
        Ok(AsyncTask::new(DbTask::Batch {
            conn: self.conn()?.clone(),
@@ -319,7 +325,7 @@ impl Database {
    #[napi]
    pub fn close(&mut self) -> Result<()> {
        self.is_open.set(false);
        let _ = self._db.take().unwrap();
        let _ = self._db.take();
        let _ = self.conn.take().unwrap();
        Ok(())
    }
@@ -482,7 +488,7 @@ impl Statement {

    /// Step the statement and return result code (executed on the background thread):
    /// 1 = Row available, 2 = Done, 3 = I/O needed
    #[napi]
    #[napi(ts_return_type = "Promise<number>")]
    pub fn step_async(&self) -> Result<AsyncTask<DbTask>> {
        Ok(AsyncTask::new(DbTask::Step {
            stmt: self.stmt.clone(),
@@ -577,7 +583,7 @@ impl Statement {
    }

    /// Get column information for the statement
    #[napi]
    #[napi(ts_return_type = "Promise<any>")]
    pub fn columns<'env>(&self, env: &'env Env) -> Result<Array<'env>> {
        let stmt_ref = self.stmt.borrow();
        let stmt = stmt_ref
bindings/javascript/sync/Cargo.toml (new file, 26 lines)
@@ -0,0 +1,26 @@
[package]
name = "turso_sync_js"
version.workspace = true
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true

[lib]
crate-type = ["cdylib"]

[dependencies]
http = "1.3.1"
napi = { version = "3.1.3", default-features = false, features = ["napi6"] }
napi-derive = { version = "3.1.1", default-features = true }
turso_sync_engine = { workspace = true }
turso_core = { workspace = true }
turso_node = { workspace = true }
genawaiter = { version = "0.99.1", default-features = false }
tracing-subscriber = "0.3.19"

[build-dependencies]
napi-build = "2.2.3"

[features]
browser = ["turso_node/browser"]
bindings/javascript/sync/README.md (new file, 60 lines)
@@ -0,0 +1,60 @@
<p align="center">
  <h1 align="center">Turso Sync for JavaScript</h1>
</p>

<p align="center">
  <a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/sync"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/sync"></a>
  <a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
  <a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package syncs local Turso databases to Turso Cloud and back.

> **⚠️ Warning:** This software is ALPHA, only use it for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Installation

```bash
npm install @tursodatabase/sync
```

## Getting Started

To sync a database hosted at [Turso Cloud](https://turso.tech):

```js
import { connect } from '@tursodatabase/sync';

const db = await connect({
  path: 'local.db',                // path used as a prefix for local files created by the sync engine
  url: 'https://<db>.turso.io',    // URL of the remote database: turso db show <db>
  authToken: '...',                // auth token issued from Turso Cloud: turso db tokens create <db>
  clientName: 'turso-sync-example' // arbitrary client name
});

// db exposes the same functions as the Database class from the @tursodatabase/database package, plus a few extra methods for sync:
await db.pull(); // pull changes from the remote
await db.push(); // push changes to the remote
await db.sync(); // pull & push changes
```
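
A typical local-first flow, sketched under the same assumptions as the snippet above (the table and column names here are only illustrative):

```js
// Work against the local database first...
await db.exec("CREATE TABLE IF NOT EXISTS notes (id INTEGER PRIMARY KEY, body TEXT)");
await db.prepare("INSERT INTO notes (body) VALUES (?)").run('hello from sync');

// ...then reconcile with the remote when convenient.
await db.push(); // send local changes to Turso Cloud
await db.pull(); // bring down changes made elsewhere
```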

## Related Packages

* The [@tursodatabase/database](https://www.npmjs.com/package/@tursodatabase/database) package provides the Turso embedded database, compatible with SQLite.
* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)
bindings/javascript/sync/build.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
fn main() {
    napi_build::setup();
}
bindings/javascript/sync/packages/browser/README.md (new file, 124 lines)
@@ -0,0 +1,124 @@
<p align="center">
  <h1 align="center">Turso Database for JavaScript in the Browser</h1>
</p>

<p align="center">
  <a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
  <a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
  <a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package is the Turso embedded database library for JavaScript in the browser.

> **⚠️ Warning:** This software is ALPHA, only use it for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Features

- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
- **In-process**: No network overhead; runs directly in the browser
- **TypeScript support**: Full TypeScript definitions included

## Installation

```bash
npm install @tursodatabase/database-browser
```

## Getting Started

### In-Memory Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create an in-memory database
const db = await connect(':memory:');

// Create a table
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

// Insert data
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
await insert.run('Alice', 'alice@example.com');
await insert.run('Bob', 'bob@example.com');

// Query data
const users = await db.prepare('SELECT * FROM users').all();
console.log(users);
// Output: [
//   { id: 1, name: 'Alice', email: 'alice@example.com' },
//   { id: 2, name: 'Bob', email: 'bob@example.com' }
// ]
```

### File-Based Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create or open a database file
const db = await connect('my-database.db');

// Create a table
await db.exec(`
  CREATE TABLE IF NOT EXISTS posts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    content TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )
`);

// Insert a post
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
const result = await insertPost.run('Hello World', 'This is my first blog post!');

console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
```

### Transactions

```javascript
import { connect } from '@tursodatabase/database-browser';

const db = await connect('transactions.db');

// Using transactions for atomic operations
const transaction = db.transaction(async (users) => {
  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  for (const user of users) {
    await insert.run(user.name, user.email);
  }
});

// Execute transaction
await transaction([
  { name: 'Alice', email: 'alice@example.com' },
  { name: 'Bob', email: 'bob@example.com' }
]);
```

## API Reference

For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).

## Related Packages

* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud.

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)
bindings/javascript/sync/packages/browser/index.js (new file, 76 lines)
@@ -0,0 +1,76 @@
import {
  createOnMessage as __wasmCreateOnMessageForFsProxy,
  getDefaultContext as __emnapiGetDefaultContext,
  instantiateNapiModule as __emnapiInstantiateNapiModule,
  WASI as __WASI,
} from '@napi-rs/wasm-runtime'

import { MainDummyImports } from "@tursodatabase/database-browser-common";


const __wasi = new __WASI({
  version: 'preview1',
})

const __wasmUrl = new URL('./sync.wasm32-wasi.wasm', import.meta.url).href
const __emnapiContext = __emnapiGetDefaultContext()


const __sharedMemory = new WebAssembly.Memory({
  initial: 4000,
  maximum: 65536,
  shared: true,
})

const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())

export let MainWorker = null;

const {
  instance: __napiInstance,
  module: __wasiModule,
  napiModule: __napiModule,
} = await __emnapiInstantiateNapiModule(__wasmFile, {
  context: __emnapiContext,
  asyncWorkPoolSize: 1,
  wasi: __wasi,
  onCreateWorker() {
    const worker = new Worker(new URL('./worker.mjs', import.meta.url), {
      type: 'module',
    })
    MainWorker = worker;
    return worker
  },
  overwriteImports(importObject) {
    importObject.env = {
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
      ...MainDummyImports,
      memory: __sharedMemory,
    }
    return importObject
  },
  beforeInit({ instance }) {
    for (const name of Object.keys(instance.exports)) {
      if (name.startsWith('__napi_register__')) {
        instance.exports[name]()
      }
    }
  },
})
export default __napiModule.exports
export const Database = __napiModule.exports.Database
export const Statement = __napiModule.exports.Statement
export const Opfs = __napiModule.exports.Opfs
export const OpfsFile = __napiModule.exports.OpfsFile
export const connect = __napiModule.exports.connect
export const initThreadPool = __napiModule.exports.initThreadPool
export const GeneratorHolder = __napiModule.exports.GeneratorHolder
export const JsDataCompletion = __napiModule.exports.JsDataCompletion
export const JsProtocolIo = __napiModule.exports.JsProtocolIo
export const JsProtocolRequestBytes = __napiModule.exports.JsProtocolRequestBytes
export const SyncEngine = __napiModule.exports.SyncEngine
export const DatabaseChangeTypeJs = __napiModule.exports.DatabaseChangeTypeJs
export const SyncEngineProtocolVersion = __napiModule.exports.SyncEngineProtocolVersion
bindings/javascript/sync/packages/browser/package.json (new file, 46 lines)
@@ -0,0 +1,46 @@
{
  "name": "@tursodatabase/sync-browser",
  "version": "0.1.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/promise.js",
  "packageManager": "yarn@4.9.2",
  "files": [
    "index.js",
    "worker.mjs",
    "sync.wasm32-wasi.wasm",
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "@napi-rs/cli": "^3.1.5",
    "@vitest/browser": "^3.2.4",
    "playwright": "^1.55.0",
    "typescript": "^5.9.2",
    "vitest": "^3.2.4"
  },
  "scripts": {
    "napi-build": "napi build --features browser --release --platform --target wasm32-wasip1-threads --no-js --manifest-path ../../Cargo.toml --output-dir . && rm index.d.ts sync.wasi* wasi* browser.js",
    "tsc-build": "npm exec tsc",
    "build": "npm run napi-build && npm run tsc-build",
    "test": "VITE_TURSO_DB_URL=http://b--a--a.localhost:10000 CI=1 vitest --browser=chromium --run && VITE_TURSO_DB_URL=http://b--a--a.localhost:10000 CI=1 vitest --browser=firefox --run"
  },
  "napi": {
    "binaryName": "sync",
    "targets": [
      "wasm32-wasip1-threads"
    ]
  },
  "imports": {
    "#index": "./index.js"
  },
  "dependencies": {
    "@napi-rs/wasm-runtime": "^1.0.3",
    "@tursodatabase/sync-common": "^0.1.5",
    "@tursodatabase/database-common": "^0.1.5"
  }
}
281
bindings/javascript/sync/packages/browser/promise.test.ts
Normal file
281
bindings/javascript/sync/packages/browser/promise.test.ts
Normal file
@@ -0,0 +1,281 @@
|
||||
import { expect, test } from 'vitest'
|
||||
import { connect, DatabaseRowMutation, DatabaseRowTransformResult } from './promise.js'
|
||||
|
||||
const localeCompare = (a, b) => a.x.localeCompare(b.x);
|
||||
|
||||
test('select-after-push', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
|
||||
await db.exec("DELETE FROM t");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("INSERT INTO t VALUES (1), (2), (3)");
|
||||
await db.push();
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
const rows = await db.prepare('SELECT * FROM t').all();
|
||||
expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }])
|
||||
}
|
||||
})
|
||||
|
||||
test('select-without-push', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
|
||||
await db.exec("DELETE FROM t");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("INSERT INTO t VALUES (1), (2), (3)");
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
const rows = await db.prepare('SELECT * FROM t').all();
|
||||
expect(rows).toEqual([])
|
||||
}
|
||||
})
|
||||
|
||||
test('merge-non-overlapping-keys', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2')");
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO q VALUES ('k3', 'value3'), ('k4', 'value4'), ('k5', 'value5')");
|
||||
|
||||
await Promise.all([db1.push(), db2.push()]);
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM q').all();
|
||||
const rows2 = await db1.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value3' }, { x: 'k4', y: 'value4' }, { x: 'k5', y: 'value5' }];
|
||||
expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
})
|
||||
|
||||
test('last-push-wins', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");
|
||||
|
||||
await db2.push();
|
||||
await db1.push();
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM q').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value5' }, { x: 'k4', y: 'value4' }];
|
||||
expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
})
|
||||
|
||||
test('last-push-wins-with-delete', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
|
||||
await db1.exec("DELETE FROM q")
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");
|
||||
|
||||
await db2.push();
|
||||
await db1.push();
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM q').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k3', y: 'value5' }];
|
||||
expect(rows1).toEqual(expected)
|
||||
expect(rows2).toEqual(expected)
|
||||
})
|
||||
|
||||
test('constraint-conflict', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS u(x TEXT PRIMARY KEY, y UNIQUE)");
|
||||
await db.exec("DELETE FROM u");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO u VALUES ('k1', 'value1')");
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO u VALUES ('k2', 'value1')");
|
||||
|
||||
await db1.push();
|
||||
await expect(async () => await db2.push()).rejects.toThrow('SQLite error: UNIQUE constraint failed: u.y');
|
||||
})
|
||||
|
||||
test('checkpoint', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
await db1.exec(`INSERT INTO q VALUES ('k${i}', 'v${i}')`);
|
||||
}
|
||||
expect((await db1.stats()).mainWal).toBeGreaterThan(4096 * 1000);
|
||||
await db1.checkpoint();
|
||||
expect((await db1.stats()).mainWal).toBe(0);
|
||||
let revertWal = (await db1.stats()).revertWal;
|
||||
expect(revertWal).toBeLessThan(4096 * 1000 / 100);
|
||||
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
await db1.exec(`UPDATE q SET y = 'u${i}' WHERE x = 'k${i}'`);
|
||||
}
|
||||
await db1.checkpoint();
|
||||
expect((await db1.stats()).revertWal).toBe(revertWal);
|
||||
})
|
||||
|
||||
test('persistence', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const path = `test-${(Math.random() * 10000) | 0}.db`;
|
||||
{
|
||||
const db1 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec(`INSERT INTO q VALUES ('k1', 'v1')`);
|
||||
await db1.exec(`INSERT INTO q VALUES ('k2', 'v2')`);
|
||||
await db1.close();
|
||||
}
|
||||
|
||||
{
|
||||
const db2 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec(`INSERT INTO q VALUES ('k3', 'v3')`);
|
||||
await db2.exec(`INSERT INTO q VALUES ('k4', 'v4')`);
|
||||
const stmt = db2.prepare('SELECT * FROM q');
|
||||
const rows = await stmt.all();
|
||||
const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
|
||||
expect(rows).toEqual(expected)
|
||||
stmt.close();
|
||||
await db2.close();
|
||||
}
|
||||
|
||||
{
|
||||
const db3 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
await db3.push();
|
||||
await db3.close();
|
||||
}
|
||||
|
||||
{
|
||||
const db4 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
const rows = await db4.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
|
||||
expect(rows).toEqual(expected)
|
||||
await db4.close();
|
||||
}
|
||||
})
|
||||
|
||||
test('transform', async () => {
|
||||
{
|
||||
const db = await connect({
|
||||
path: ':memory:',
|
||||
url: process.env.VITE_TURSO_DB_URL,
|
||||
});
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
|
||||
await db.exec("DELETE FROM counter");
|
||||
await db.exec("INSERT INTO counter VALUES ('1', 0)")
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const transform = (m: DatabaseRowMutation) => ({
|
||||
operation: 'rewrite',
|
||||
stmt: {
|
||||
sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
|
||||
values: [m.after.value - m.before.value, m.after.key]
|
||||
}
|
||||
} as DatabaseRowTransformResult);
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
|
||||
await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
|
||||
await Promise.all([db1.push(), db2.push()]);
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM counter').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM counter').all();
|
||||
expect(rows1).toEqual([{ key: '1', value: 2 }]);
|
||||
expect(rows2).toEqual([{ key: '1', value: 2 }]);
|
||||
})
|
||||
|
||||
test('transform-many', async () => {
|
||||
{
|
||||
const db = await connect({
|
||||
path: ':memory:',
|
||||
url: process.env.VITE_TURSO_DB_URL,
|
||||
});
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
|
||||
await db.exec("DELETE FROM counter");
|
||||
await db.exec("INSERT INTO counter VALUES ('1', 0)")
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const transform = (m: DatabaseRowMutation) => ({
|
||||
operation: 'rewrite',
|
||||
stmt: {
|
||||
sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
|
||||
values: [m.after.value - m.before.value, m.after.key]
|
||||
}
|
||||
} as DatabaseRowTransformResult);
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
|
||||
for (let i = 0; i < 1002; i++) {
|
||||
await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
}
|
||||
for (let i = 0; i < 1001; i++) {
|
||||
await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
}
|
||||
|
||||
let start = performance.now();
|
||||
await Promise.all([db1.push(), db2.push()]);
|
||||
console.info('push', performance.now() - start);
|
||||
|
||||
start = performance.now();
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
console.info('pull', performance.now() - start);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM counter').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM counter').all();
|
||||
expect(rows1).toEqual([{ key: '1', value: 1001 + 1002 }]);
|
||||
expect(rows2).toEqual([{ key: '1', value: 1001 + 1002 }]);
|
||||
})
|
||||
113
bindings/javascript/sync/packages/browser/promise.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common"
|
||||
import { DatabasePromise, DatabaseOpts, NativeDatabase } from "@tursodatabase/database-common"
|
||||
import { ProtocolIo, run, SyncOpts, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult, memoryIO } from "@tursodatabase/sync-common";
|
||||
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";
|
||||
import { Database as NativeDB, SyncEngine } from "#index";
|
||||
|
||||
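// ProtocolIo backed by localStorage: sync metadata blobs are stored as UTF-8 text keyed by file path.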
let BrowserIo: ProtocolIo = {
|
||||
async read(path: string): Promise<Buffer | Uint8Array | null> {
|
||||
const result = localStorage.getItem(path);
|
||||
if (result == null) {
|
||||
return null;
|
||||
}
|
||||
return new TextEncoder().encode(result);
|
||||
},
|
||||
async write(path: string, data: Buffer | Uint8Array): Promise<void> {
|
||||
const array = new Uint8Array(data);
|
||||
const value = new TextDecoder('utf-8').decode(array);
|
||||
localStorage.setItem(path, value);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
class Database extends DatabasePromise {
|
||||
runOpts: RunOpts;
|
||||
engine: any;
|
||||
io: ProtocolIo;
|
||||
fsPath: string | null;
|
||||
constructor(db: NativeDatabase, io: ProtocolIo, runOpts: RunOpts, engine: any, fsPath: string | null, opts: DatabaseOpts = {}) {
|
||||
super(db, opts)
|
||||
this.runOpts = runOpts;
|
||||
this.engine = engine;
|
||||
this.fsPath = fsPath;
|
||||
this.io = io;
|
||||
}
|
||||
async sync() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.sync());
|
||||
}
|
||||
async pull() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.pull());
|
||||
}
|
||||
async push() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.push());
|
||||
}
|
||||
async checkpoint() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.checkpoint());
|
||||
}
|
||||
async stats(): Promise<{ operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }> {
|
||||
return (await run(this.runOpts, this.io, this.engine, this.engine.stats()));
|
||||
}
|
||||
override async close(): Promise<void> {
|
||||
this.db.close();
|
||||
this.engine.close();
|
||||
if (this.fsPath != null) {
|
||||
await Promise.all([
|
||||
unregisterFileAtWorker(MainWorker, this.fsPath),
|
||||
unregisterFileAtWorker(MainWorker, `${this.fsPath}-wal`),
|
||||
unregisterFileAtWorker(MainWorker, `${this.fsPath}-revert`),
|
||||
unregisterFileAtWorker(MainWorker, `${this.fsPath}-info`),
|
||||
unregisterFileAtWorker(MainWorker, `${this.fsPath}-changes`),
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new database connection asynchronously.
|
||||
*
|
||||
* @param {SyncOpts} opts - Options for the sync connection (path, url, authToken, transform, etc.).
|
||||
* @returns {Promise<Database>} - A promise that resolves to a Database instance.
|
||||
*/
|
||||
async function connect(opts: SyncOpts): Promise<Database> {
|
||||
const engine = new SyncEngine({
|
||||
path: opts.path,
|
||||
clientName: opts.clientName,
|
||||
tablesIgnore: opts.tablesIgnore,
|
||||
useTransform: opts.transform != null,
|
||||
tracing: opts.tracing,
|
||||
protocolVersion: 1
|
||||
});
|
||||
const runOpts: RunOpts = {
|
||||
url: opts.url,
|
||||
headers: {
|
||||
...(opts.authToken != null && { "Authorization": `Bearer ${opts.authToken}` }),
|
||||
...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
|
||||
},
|
||||
preemptionMs: 1,
|
||||
transform: opts.transform,
|
||||
};
|
||||
const isMemory = opts.path == ':memory:';
|
||||
let io = isMemory ? memoryIO() : BrowserIo;
|
||||
|
||||
await initThreadPool();
|
||||
if (MainWorker == null) {
|
||||
throw new Error("panic: MainWorker is not set");
|
||||
}
|
||||
if (!isMemory) {
|
||||
await Promise.all([
|
||||
registerFileAtWorker(MainWorker, opts.path),
|
||||
registerFileAtWorker(MainWorker, `${opts.path}-wal`),
|
||||
registerFileAtWorker(MainWorker, `${opts.path}-revert`),
|
||||
registerFileAtWorker(MainWorker, `${opts.path}-info`),
|
||||
registerFileAtWorker(MainWorker, `${opts.path}-changes`),
|
||||
]);
|
||||
}
|
||||
await run(runOpts, io, engine, engine.init());
|
||||
|
||||
const nativeDb = engine.open();
|
||||
return new Database(nativeDb as any, io, runOpts, engine, isMemory ? null : opts.path, {});
|
||||
}
|
||||
|
||||
export { connect, Database, }
|
||||
export type { DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
|
||||
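For orientation, here is a minimal usage sketch of the API defined above. It mirrors the test file; the database path, sync URL, and auth token are placeholders, and the public entry point is assumed to be `@tursodatabase/sync-browser` as referenced in the README below.

```typescript
// Sketch only: placeholder URL/token; package name assumed to be @tursodatabase/sync-browser.
import { connect } from "@tursodatabase/sync-browser";

const db = await connect({
  path: "app.db",                      // persisted through OPFS via the worker
  url: "https://<your-db>.turso.io",   // placeholder sync endpoint
  authToken: "<token>",                // optional; sent as an Authorization: Bearer header
});

await db.exec("CREATE TABLE IF NOT EXISTS notes(id TEXT PRIMARY KEY, body TEXT)");
await db.exec("INSERT INTO notes VALUES ('n1', 'hello from the browser')");

await db.push();                       // send local changes to the remote database
await db.pull();                       // fetch remote changes

console.log(await db.prepare("SELECT * FROM notes").all());
console.log(await db.stats());         // { operations, mainWal, revertWal, ... }

await db.close();
```

Note that a `:memory:` path skips the OPFS worker registration and keeps sync metadata in the in-memory `ProtocolIo`, exactly as `connect` does above.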
24
bindings/javascript/sync/packages/browser/tsconfig.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"skipLibCheck": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"module": "nodenext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "nodenext",
|
||||
"outDir": "dist/",
|
||||
"lib": [
|
||||
"es2020",
|
||||
"DOM",
|
||||
"WebWorker"
|
||||
],
|
||||
"paths": {
|
||||
"#index": [
|
||||
"./index.js"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"*"
|
||||
]
|
||||
}
|
||||
23
bindings/javascript/sync/packages/browser/vitest.config.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { defineConfig } from 'vitest/config'
|
||||
|
||||
export default defineConfig({
|
||||
define: {
|
||||
'process.env.NODE_DEBUG_NATIVE': 'false',
|
||||
},
|
||||
server: {
|
||||
headers: {
|
||||
"Cross-Origin-Embedder-Policy": "require-corp",
|
||||
"Cross-Origin-Opener-Policy": "same-origin"
|
||||
},
|
||||
},
|
||||
test: {
|
||||
browser: {
|
||||
enabled: true,
|
||||
provider: 'playwright',
|
||||
instances: [
|
||||
{ browser: 'chromium' },
|
||||
{ browser: 'firefox' }
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
55
bindings/javascript/sync/packages/browser/worker.mjs
Normal file
@@ -0,0 +1,55 @@
|
||||
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'
|
||||
import { OpfsDirectory, workerImports } from "@tursodatabase/database-browser-common";
|
||||
|
||||
var opfs = new OpfsDirectory();
|
||||
var memory = null;
|
||||
|
||||
const handler = new MessageHandler({
|
||||
onLoad({ wasmModule, wasmMemory }) {
|
||||
memory = wasmMemory;
|
||||
const wasi = new WASI({
|
||||
print: function () {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log.apply(console, arguments)
|
||||
},
|
||||
printErr: function () {
|
||||
// eslint-disable-next-line no-console
|
||||
console.error.apply(console, arguments)
|
||||
},
|
||||
})
|
||||
return instantiateNapiModuleSync(wasmModule, {
|
||||
childThread: true,
|
||||
wasi,
|
||||
overwriteImports(importObject) {
|
||||
importObject.env = {
|
||||
...importObject.env,
|
||||
...importObject.napi,
|
||||
...importObject.emnapi,
|
||||
...workerImports(opfs, memory),
|
||||
memory: wasmMemory,
|
||||
}
|
||||
},
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
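// Intercept Turso OPFS register/unregister requests from the main thread; every other message is forwarded to the NAPI-RS message handler.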
globalThis.onmessage = async function (e) {
|
||||
if (e.data.__turso__ == 'register') {
|
||||
try {
|
||||
await opfs.registerFile(e.data.path);
|
||||
self.postMessage({ id: e.data.id });
|
||||
} catch (error) {
|
||||
self.postMessage({ id: e.data.id, error: error });
|
||||
}
|
||||
return;
|
||||
} else if (e.data.__turso__ == 'unregister') {
|
||||
try {
|
||||
await opfs.unregisterFile(e.data.path);
|
||||
self.postMessage({ id: e.data.id });
|
||||
} catch (error) {
|
||||
self.postMessage({ id: e.data.id, error: error });
|
||||
}
|
||||
return;
|
||||
}
|
||||
handler.handle(e)
|
||||
}
|
||||
8
bindings/javascript/sync/packages/common/README.md
Normal file
@@ -0,0 +1,8 @@
|
||||
## About
|
||||
|
||||
This package is the Turso Sync common JavaScript library shared between the final Node and browser builds.
|
||||
|
||||
Do not use this package directly; use `@tursodatabase/sync` or `@tursodatabase/sync-browser` instead.
|
||||
|
||||
> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.
|
||||
|
||||
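As a quick illustration of the intended entry points (a sketch; it assumes the final packages re-export the same `connect` API that this commit adds for the browser build):

```typescript
// Import from the final packages, never from @tursodatabase/sync-common directly.
import { connect } from "@tursodatabase/sync-browser"; // or "@tursodatabase/sync" in Node

const db = await connect({ path: ":memory:", url: "https://<your-db>.turso.io" });
await db.sync(); // synchronize with the remote database
```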
5
bindings/javascript/sync/packages/common/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { run, memoryIO } from "./run.js"
|
||||
import { SyncOpts, ProtocolIo, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult } from "./types.js"
|
||||
|
||||
export { run, memoryIO, }
|
||||
export type { SyncOpts, ProtocolIo, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
|
||||
25
bindings/javascript/sync/packages/common/package.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "@tursodatabase/sync-common",
|
||||
"version": "0.1.5",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/tursodatabase/turso"
|
||||
},
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"packageManager": "yarn@4.9.2",
|
||||
"files": [
|
||||
"dist/**",
|
||||
"README.md"
|
||||
],
|
||||
"devDependencies": {
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"scripts": {
|
||||
"tsc-build": "npm exec tsc",
|
||||
"build": "npm run tsc-build",
|
||||
"test": "echo 'no tests'"
|
||||
}
|
||||
}
|
||||
127
bindings/javascript/sync/packages/common/run.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
"use strict";
|
||||
|
||||
import { GeneratorResponse, ProtocolIo, RunOpts } from "./types.js";
|
||||
|
||||
const GENERATOR_RESUME_IO = 0;
|
||||
const GENERATOR_RESUME_DONE = 1;
|
||||
|
||||
interface TrackPromise<T> {
|
||||
promise: Promise<T>,
|
||||
finished: boolean
|
||||
}
|
||||
|
||||
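// Wraps a promise with a `finished` flag so the run loop can poll whether it has settled without awaiting it.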
function trackPromise<T>(p: Promise<T>): TrackPromise<T> {
|
||||
let status = { promise: null, finished: false };
|
||||
status.promise = p.finally(() => status.finished = true);
|
||||
return status;
|
||||
}
|
||||
|
||||
function timeoutMs(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms))
|
||||
}
|
||||
|
||||
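// Services a single protocol request emitted by the sync engine: an HTTP call, a full metadata read/write, or a batch of mutation transforms.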
async function process(opts: RunOpts, io: ProtocolIo, request: any) {
|
||||
const requestType = request.request();
|
||||
const completion = request.completion();
|
||||
if (requestType.type == 'Http') {
|
||||
try {
|
||||
let headers = opts.headers;
|
||||
if (requestType.headers != null && requestType.headers.length > 0) {
|
||||
headers = { ...opts.headers };
|
||||
for (let header of requestType.headers) {
|
||||
headers[header[0]] = header[1];
|
||||
}
|
||||
}
|
||||
const response = await fetch(`${opts.url}${requestType.path}`, {
|
||||
method: requestType.method,
|
||||
headers: headers,
|
||||
body: requestType.body != null ? new Uint8Array(requestType.body) : null,
|
||||
});
|
||||
completion.status(response.status);
|
||||
const reader = response.body.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) {
|
||||
completion.done();
|
||||
break;
|
||||
}
|
||||
completion.pushBuffer(value);
|
||||
}
|
||||
} catch (error) {
|
||||
completion.poison(`fetch error: ${error}`);
|
||||
}
|
||||
} else if (requestType.type == 'FullRead') {
|
||||
try {
|
||||
const metadata = await io.read(requestType.path);
|
||||
if (metadata != null) {
|
||||
completion.pushBuffer(metadata);
|
||||
}
|
||||
completion.done();
|
||||
} catch (error) {
|
||||
completion.poison(`metadata read error: ${error}`);
|
||||
}
|
||||
} else if (requestType.type == 'FullWrite') {
|
||||
try {
|
||||
await io.write(requestType.path, requestType.content);
|
||||
completion.done();
|
||||
} catch (error) {
|
||||
completion.poison(`metadata write error: ${error}`);
|
||||
}
|
||||
} else if (requestType.type == 'Transform') {
|
||||
if (opts.transform == null) {
|
||||
completion.poison("transform is not set");
|
||||
return;
|
||||
}
|
||||
const results = [];
|
||||
for (const mutation of requestType.mutations) {
|
||||
const result = opts.transform(mutation);
|
||||
if (result == null) {
|
||||
results.push({ type: 'Keep' });
|
||||
} else if (result.operation == 'skip') {
|
||||
results.push({ type: 'Skip' });
|
||||
} else if (result.operation == 'rewrite') {
|
||||
results.push({ type: 'Rewrite', stmt: result.stmt });
|
||||
} else {
|
||||
completion.poison("unexpected transform operation");
|
||||
return;
|
||||
}
|
||||
}
|
||||
completion.pushTransform(results);
|
||||
completion.done();
|
||||
}
|
||||
}
|
||||
|
||||
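// In-memory ProtocolIo used for ':memory:' databases: sync metadata lives in a Map and never touches persistent storage.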
export function memoryIO(): ProtocolIo {
|
||||
let values = new Map();
|
||||
return {
|
||||
async read(path: string): Promise<Buffer | Uint8Array | null> {
|
||||
return values.get(path);
|
||||
},
|
||||
async write(path: string, data: Buffer | Uint8Array): Promise<void> {
|
||||
values.set(path, data);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
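// Drives a sync-engine generator to completion, servicing its protocol requests concurrently and re-entering the engine I/O loop at least every `preemptionMs` milliseconds.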
export async function run(opts: RunOpts, io: ProtocolIo, engine: any, generator: any): Promise<any> {
|
||||
let tasks = [];
|
||||
while (true) {
|
||||
const { type, ...rest }: GeneratorResponse = await generator.resumeAsync(null);
|
||||
if (type == 'Done') {
|
||||
return null;
|
||||
}
|
||||
if (type == 'SyncEngineStats') {
|
||||
return rest;
|
||||
}
|
||||
for (let request = engine.protocolIo(); request != null; request = engine.protocolIo()) {
|
||||
tasks.push(trackPromise(process(opts, io, request)));
|
||||
}
|
||||
|
||||
const tasksRace = tasks.length == 0 ? Promise.resolve() : Promise.race([timeoutMs(opts.preemptionMs), ...tasks.map(t => t.promise)]);
|
||||
await Promise.all([engine.ioLoopAsync(), tasksRace]);
|
||||
|
||||
tasks = tasks.filter(t => !t.finished);
|
||||
}
|
||||
return generator.take();
|
||||
}
|
||||
17
bindings/javascript/sync/packages/common/tsconfig.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"skipLibCheck": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"outDir": "dist/",
|
||||
"lib": [
|
||||
"es2020",
|
||||
"dom"
|
||||
],
|
||||
},
|
||||
"include": [
|
||||
"*"
|
||||
]
|
||||
}
|
||||
50
bindings/javascript/sync/packages/common/types.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
export declare const enum DatabaseChangeType {
|
||||
Insert = 0,
|
||||
Update = 1,
|
||||
Delete = 2
|
||||
}
|
||||
|
||||
export interface DatabaseRowMutation {
|
||||
changeTime: number
|
||||
tableName: string
|
||||
id: number
|
||||
changeType: DatabaseChangeType
|
||||
before?: Record<string, any>
|
||||
after?: Record<string, any>
|
||||
updates?: Record<string, any>
|
||||
}
|
||||
|
||||
export type DatabaseRowTransformResult = { operation: 'skip' } | { operation: 'rewrite', stmt: DatabaseRowStatement } | null;
|
||||
export type Transform = (arg: DatabaseRowMutation) => DatabaseRowTransformResult;
|
||||
export interface RunOpts {
|
||||
preemptionMs: number,
|
||||
url: string,
|
||||
headers: { [K: string]: string }
|
||||
transform?: Transform,
|
||||
}
|
||||
|
||||
export interface ProtocolIo {
|
||||
read(path: string): Promise<Buffer | Uint8Array | null>;
|
||||
write(path: string, content: Buffer | Uint8Array): Promise<void>;
|
||||
}
|
||||
|
||||
export interface SyncOpts {
|
||||
path: string;
|
||||
clientName?: string;
|
||||
url: string;
|
||||
authToken?: string;
|
||||
encryptionKey?: string;
|
||||
tablesIgnore?: string[],
|
||||
transform?: Transform,
|
||||
tracing?: string,
|
||||
}
|
||||
|
||||
export interface DatabaseRowStatement {
|
||||
sql: string
|
||||
values: Array<any>
|
||||
}
|
||||
|
||||
export type GeneratorResponse =
|
||||
| { type: 'IO' }
|
||||
| { type: 'Done' }
|
||||
| { type: 'SyncEngineStats', operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }
|
||||
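For reference, a sketch of a `Transform` callback that satisfies these types; the `local_cache` table name is purely illustrative, and the rewrite branch mirrors the counter example in the browser test file:

```typescript
import type { DatabaseRowMutation, DatabaseRowTransformResult } from "@tursodatabase/sync-common";

// Example transform: drop mutations for a hypothetical local-only table and
// rewrite counter updates as relative increments so concurrent pushes merge.
const transform = (m: DatabaseRowMutation): DatabaseRowTransformResult => {
  if (m.tableName === "local_cache") {
    return { operation: "skip" };
  }
  if (m.tableName === "counter" && m.before != null && m.after != null) {
    return {
      operation: "rewrite",
      stmt: {
        sql: "UPDATE counter SET value = value + ? WHERE key = ?",
        values: [m.after.value - m.before.value, m.after.key],
      },
    };
  }
  return null; // keep the mutation as-is
};
```

The callback is supplied via `SyncOpts.transform`, and the run loop invokes it whenever the engine emits a `Transform` protocol request.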
125
bindings/javascript/sync/packages/native/README.md
Normal file
@@ -0,0 +1,125 @@
|
||||
<p align="center">
|
||||
<h1 align="center">Turso Database for JavaScript in Node</h1>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
|
||||
<a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
|
||||
</p>
|
||||
<p align="center">
|
||||
<a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
|
||||
</p>
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
|
||||
This package is the Turso embedded database library for JavaScript in Node.
|
||||
|
||||
> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.
|
||||
|
||||
## Features
|
||||
|
||||
- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
|
||||
- **In-process**: No network overhead, runs directly in your Node.js process
|
||||
- **TypeScript support**: Full TypeScript definitions included
|
||||
- **Cross-platform**: Supports Linux (x86 and arm64), macOS, and Windows (the browser is supported by the separate `@tursodatabase/database-browser` package)
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install @tursodatabase/database
|
||||
```
|
||||
|
||||
## Getting Started
|
||||
|
||||
### In-Memory Database
|
||||
|
||||
```javascript
|
||||
import { connect } from '@tursodatabase/database';
|
||||
|
||||
// Create an in-memory database
|
||||
const db = await connect(':memory:');
|
||||
|
||||
// Create a table
|
||||
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');
|
||||
|
||||
// Insert data
|
||||
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
|
||||
await insert.run('Alice', 'alice@example.com');
|
||||
await insert.run('Bob', 'bob@example.com');
|
||||
|
||||
// Query data
|
||||
const users = await db.prepare('SELECT * FROM users').all();
|
||||
console.log(users);
|
||||
// Output: [
|
||||
// { id: 1, name: 'Alice', email: 'alice@example.com' },
|
||||
// { id: 2, name: 'Bob', email: 'bob@example.com' }
|
||||
// ]
|
||||
```
|
||||
|
||||
### File-Based Database
|
||||
|
||||
```javascript
|
||||
import { connect } from '@tursodatabase/database';
|
||||
|
||||
// Create or open a database file
|
||||
const db = await connect('my-database.db');
|
||||
|
||||
// Create a table
|
||||
await db.exec(`
|
||||
CREATE TABLE IF NOT EXISTS posts (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
title TEXT NOT NULL,
|
||||
content TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
`);
|
||||
|
||||
// Insert a post
|
||||
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
|
||||
const result = await insertPost.run('Hello World', 'This is my first blog post!');
|
||||
|
||||
console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
|
||||
```
|
||||
|
||||
### Transactions
|
||||
|
||||
```javascript
|
||||
import { connect } from '@tursodatabase/database';
|
||||
|
||||
const db = await connect('transactions.db');
|
||||
|
||||
// Using transactions for atomic operations
|
||||
const transaction = db.transaction(async (users) => {
|
||||
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
|
||||
for (const user of users) {
|
||||
await insert.run(user.name, user.email);
|
||||
}
|
||||
});
|
||||
|
||||
// Execute transaction
|
||||
await transaction([
|
||||
{ name: 'Alice', email: 'alice@example.com' },
|
||||
{ name: 'Bob', email: 'bob@example.com' }
|
||||
]);
|
||||
```
|
||||
|
||||
## API Reference
|
||||
|
||||
For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).
|
||||
|
||||
## Related Packages
|
||||
|
||||
* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
|
||||
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud.
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the [MIT license](../../LICENSE.md).
|
||||
|
||||
## Support
|
||||
|
||||
- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
|
||||
- [Documentation](https://docs.turso.tech)
|
||||
- [Discord Community](https://tur.so/discord)
|
||||
244
bindings/javascript/sync/packages/native/index.d.ts
vendored
Normal file
@@ -0,0 +1,244 @@
|
||||
/* auto-generated by NAPI-RS */
|
||||
/* eslint-disable */
|
||||
/** A database connection. */
|
||||
export declare class Database {
|
||||
/**
|
||||
* Creates a new database instance.
|
||||
*
|
||||
* # Arguments
|
||||
* * `path` - The path to the database file.
|
||||
*/
|
||||
constructor(path: string, opts?: DatabaseOpts | undefined | null)
|
||||
/** Returns whether the database is in memory-only mode. */
|
||||
get memory(): boolean
|
||||
/** Returns the path to the database file. */
|
||||
get path(): string
|
||||
/** Returns whether the database connection is open. */
|
||||
get open(): boolean
|
||||
/**
|
||||
* Executes a batch of SQL statements on main thread
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * `sql` - The SQL statements to execute.
|
||||
*
|
||||
* # Returns
|
||||
*/
|
||||
batchSync(sql: string): void
|
||||
/**
|
||||
* Executes a batch of SQL statements outside of main thread
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * `sql` - The SQL statements to execute.
|
||||
*
|
||||
* # Returns
|
||||
*/
|
||||
batchAsync(sql: string): Promise<void>
|
||||
/**
|
||||
* Prepares a statement for execution.
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * `sql` - The SQL statement to prepare.
|
||||
*
|
||||
* # Returns
|
||||
*
|
||||
* A `Statement` instance.
|
||||
*/
|
||||
prepare(sql: string): Statement
|
||||
/**
|
||||
* Returns the rowid of the last row inserted.
|
||||
*
|
||||
* # Returns
|
||||
*
|
||||
* The rowid of the last row inserted.
|
||||
*/
|
||||
lastInsertRowid(): number
|
||||
/**
|
||||
* Returns the number of changes made by the last statement.
|
||||
*
|
||||
* # Returns
|
||||
*
|
||||
* The number of changes made by the last statement.
|
||||
*/
|
||||
changes(): number
|
||||
/**
|
||||
* Returns the total number of changes made by all statements.
|
||||
*
|
||||
* # Returns
|
||||
*
|
||||
* The total number of changes made by all statements.
|
||||
*/
|
||||
totalChanges(): number
|
||||
/**
|
||||
* Closes the database connection.
|
||||
*
|
||||
* # Returns
|
||||
*
|
||||
* `Ok(())` if the database is closed successfully.
|
||||
*/
|
||||
close(): void
|
||||
/**
|
||||
* Sets the default safe integers mode for all statements from this database.
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * `toggle` - Whether to use safe integers by default.
|
||||
*/
|
||||
defaultSafeIntegers(toggle?: boolean | undefined | null): void
|
||||
/** Runs the I/O loop synchronously. */
|
||||
ioLoopSync(): void
|
||||
/** Runs the I/O loop asynchronously, returning a Promise. */
|
||||
ioLoopAsync(): Promise<void>
|
||||
}
|
||||
|
||||
/** A prepared statement. */
|
||||
export declare class Statement {
|
||||
reset(): void
|
||||
/** Returns the number of parameters in the statement. */
|
||||
parameterCount(): number
|
||||
/**
|
||||
* Returns the name of a parameter at a specific 1-based index.
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * `index` - The 1-based parameter index.
|
||||
*/
|
||||
parameterName(index: number): string | null
|
||||
/**
|
||||
* Binds a parameter at a specific 1-based index.
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * `index` - The 1-based parameter index.
|
||||
* * `value` - The value to bind.
|
||||
*/
|
||||
bindAt(index: number, value: unknown): void
|
||||
/**
|
||||
* Step the statement and return result code (executed on the main thread):
|
||||
* 1 = Row available, 2 = Done, 3 = I/O needed
|
||||
*/
|
||||
stepSync(): number
|
||||
/**
|
||||
* Step the statement and return result code (executed on the background thread):
|
||||
* 1 = Row available, 2 = Done, 3 = I/O needed
|
||||
*/
|
||||
stepAsync(): Promise<number>
|
||||
/** Get the current row data according to the presentation mode */
|
||||
row(): unknown
|
||||
/** Sets the presentation mode to raw. */
|
||||
raw(raw?: boolean | undefined | null): void
|
||||
/** Sets the presentation mode to pluck. */
|
||||
pluck(pluck?: boolean | undefined | null): void
|
||||
/**
|
||||
* Sets safe integers mode for this statement.
|
||||
*
|
||||
* # Arguments
|
||||
*
|
||||
* * `toggle` - Whether to use safe integers.
|
||||
*/
|
||||
safeIntegers(toggle?: boolean | undefined | null): void
|
||||
/** Get column information for the statement */
|
||||
columns(): Promise<any>
|
||||
/** Finalizes the statement. */
|
||||
finalize(): void
|
||||
}
|
||||
|
||||
export interface DatabaseOpts {
|
||||
tracing?: string
|
||||
}
|
||||
export declare class GeneratorHolder {
|
||||
resumeSync(error?: string | undefined | null): GeneratorResponse
|
||||
resumeAsync(error?: string | undefined | null): Promise<unknown>
|
||||
}
|
||||
|
||||
export declare class JsDataCompletion {
|
||||
poison(err: string): void
|
||||
status(value: number): void
|
||||
pushBuffer(value: Buffer): void
|
||||
pushTransform(values: Array<DatabaseRowTransformResultJs>): void
|
||||
done(): void
|
||||
}
|
||||
|
||||
export declare class JsProtocolIo {
|
||||
takeRequest(): JsProtocolRequestBytes | null
|
||||
}
|
||||
|
||||
export declare class JsProtocolRequestBytes {
|
||||
request(): JsProtocolRequest
|
||||
completion(): JsDataCompletion
|
||||
}
|
||||
|
||||
export declare class SyncEngine {
|
||||
constructor(opts: SyncEngineOpts)
|
||||
init(): GeneratorHolder
|
||||
ioLoopSync(): void
|
||||
/** Runs the I/O loop asynchronously, returning a Promise. */
|
||||
ioLoopAsync(): Promise<void>
|
||||
protocolIo(): JsProtocolRequestBytes | null
|
||||
sync(): GeneratorHolder
|
||||
push(): GeneratorHolder
|
||||
stats(): GeneratorHolder
|
||||
pull(): GeneratorHolder
|
||||
checkpoint(): GeneratorHolder
|
||||
open(): Database
|
||||
close(): void
|
||||
}
|
||||
|
||||
export declare const enum DatabaseChangeTypeJs {
|
||||
Insert = 0,
|
||||
Update = 1,
|
||||
Delete = 2
|
||||
}
|
||||
|
||||
export interface DatabaseOpts {
|
||||
path: string
|
||||
}
|
||||
|
||||
export interface DatabaseRowMutationJs {
|
||||
changeTime: number
|
||||
tableName: string
|
||||
id: number
|
||||
changeType: DatabaseChangeTypeJs
|
||||
before?: Record<string, any>
|
||||
after?: Record<string, any>
|
||||
updates?: Record<string, any>
|
||||
}
|
||||
|
||||
export interface DatabaseRowStatementJs {
|
||||
sql: string
|
||||
values: Array<any>
|
||||
}
|
||||
|
||||
export type DatabaseRowTransformResultJs =
|
||||
| { type: 'Keep' }
|
||||
| { type: 'Skip' }
|
||||
| { type: 'Rewrite', stmt: DatabaseRowStatementJs }
|
||||
|
||||
export type GeneratorResponse =
|
||||
| { type: 'IO' }
|
||||
| { type: 'Done' }
|
||||
| { type: 'SyncEngineStats', operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime?: number }
|
||||
|
||||
export type JsProtocolRequest =
|
||||
| { type: 'Http', method: string, path: string, body?: Array<number>, headers: Array<[string, string]> }
|
||||
| { type: 'FullRead', path: string }
|
||||
| { type: 'FullWrite', path: string, content: Array<number> }
|
||||
| { type: 'Transform', mutations: Array<DatabaseRowMutationJs> }
|
||||
|
||||
export interface SyncEngineOpts {
|
||||
path: string
|
||||
clientName?: string
|
||||
walPullBatchSize?: number
|
||||
tracing?: string
|
||||
tablesIgnore?: Array<string>
|
||||
useTransform: boolean
|
||||
protocolVersion?: SyncEngineProtocolVersion
|
||||
}
|
||||
|
||||
export declare const enum SyncEngineProtocolVersion {
|
||||
Legacy = 0,
|
||||
V1 = 1
|
||||
}
|
||||
520
bindings/javascript/sync/packages/native/index.js
Normal file
@@ -0,0 +1,520 @@
|
||||
// prettier-ignore
|
||||
/* eslint-disable */
|
||||
// @ts-nocheck
|
||||
/* auto-generated by NAPI-RS */
|
||||
|
||||
import { createRequire } from 'node:module'
|
||||
const require = createRequire(import.meta.url)
|
||||
const __dirname = new URL('.', import.meta.url).pathname
|
||||
|
||||
const { readFileSync } = require('node:fs')
|
||||
let nativeBinding = null
|
||||
const loadErrors = []
|
||||
|
||||
const isMusl = () => {
|
||||
let musl = false
|
||||
if (process.platform === 'linux') {
|
||||
musl = isMuslFromFilesystem()
|
||||
if (musl === null) {
|
||||
musl = isMuslFromReport()
|
||||
}
|
||||
if (musl === null) {
|
||||
musl = isMuslFromChildProcess()
|
||||
}
|
||||
}
|
||||
return musl
|
||||
}
|
||||
|
||||
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')
|
||||
|
||||
const isMuslFromFilesystem = () => {
|
||||
try {
|
||||
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
const isMuslFromReport = () => {
|
||||
let report = null
|
||||
if (typeof process.report?.getReport === 'function') {
|
||||
process.report.excludeNetwork = true
|
||||
report = process.report.getReport()
|
||||
}
|
||||
if (!report) {
|
||||
return null
|
||||
}
|
||||
if (report.header && report.header.glibcVersionRuntime) {
|
||||
return false
|
||||
}
|
||||
if (Array.isArray(report.sharedObjects)) {
|
||||
if (report.sharedObjects.some(isFileMusl)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
const isMuslFromChildProcess = () => {
|
||||
try {
|
||||
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
|
||||
} catch (e) {
|
||||
// If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
function requireNative() {
|
||||
if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
|
||||
try {
|
||||
nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
|
||||
} catch (err) {
|
||||
loadErrors.push(err)
|
||||
}
|
||||
} else if (process.platform === 'android') {
|
||||
if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./sync.android-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-android-arm64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-android-arm64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm') {
|
||||
try {
|
||||
return require('./sync.android-arm-eabi.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-android-arm-eabi')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-android-arm-eabi/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'win32') {
|
||||
if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./sync.win32-x64-msvc.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-win32-x64-msvc')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-win32-x64-msvc/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'ia32') {
|
||||
try {
|
||||
return require('./sync.win32-ia32-msvc.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-win32-ia32-msvc')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-win32-ia32-msvc/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./sync.win32-arm64-msvc.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-win32-arm64-msvc')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-win32-arm64-msvc/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'darwin') {
|
||||
try {
|
||||
return require('./sync.darwin-universal.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-darwin-universal')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-darwin-universal/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./sync.darwin-x64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-darwin-x64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-darwin-x64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./sync.darwin-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-darwin-arm64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-darwin-arm64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'freebsd') {
|
||||
if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./sync.freebsd-x64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-freebsd-x64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-freebsd-x64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./sync.freebsd-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-freebsd-arm64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-freebsd-arm64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'linux') {
|
||||
if (process.arch === 'x64') {
|
||||
if (isMusl()) {
|
||||
try {
|
||||
return require('./sync.linux-x64-musl.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-x64-musl')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-x64-musl/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
return require('./sync.linux-x64-gnu.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-x64-gnu')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-x64-gnu/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
if (isMusl()) {
|
||||
try {
|
||||
return require('./sync.linux-arm64-musl.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-arm64-musl')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-arm64-musl/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
return require('./sync.linux-arm64-gnu.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-arm64-gnu')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-arm64-gnu/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
}
|
||||
} else if (process.arch === 'arm') {
|
||||
if (isMusl()) {
|
||||
try {
|
||||
return require('./sync.linux-arm-musleabihf.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-arm-musleabihf')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-arm-musleabihf/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
return require('./sync.linux-arm-gnueabihf.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-arm-gnueabihf')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-arm-gnueabihf/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
}
|
||||
} else if (process.arch === 'riscv64') {
|
||||
if (isMusl()) {
|
||||
try {
|
||||
return require('./sync.linux-riscv64-musl.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-riscv64-musl')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-riscv64-musl/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
return require('./sync.linux-riscv64-gnu.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-riscv64-gnu')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-riscv64-gnu/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
}
|
||||
} else if (process.arch === 'ppc64') {
|
||||
try {
|
||||
return require('./sync.linux-ppc64-gnu.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-ppc64-gnu')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-ppc64-gnu/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 's390x') {
|
||||
try {
|
||||
return require('./sync.linux-s390x-gnu.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-linux-s390x-gnu')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-linux-s390x-gnu/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'openharmony') {
|
||||
if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./sync.openharmony-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-openharmony-arm64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-openharmony-arm64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./sync.openharmony-x64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-openharmony-x64')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-openharmony-x64/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm') {
|
||||
try {
|
||||
return require('./sync.openharmony-arm.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
const binding = require('@tursodatabase/sync-openharmony-arm')
|
||||
const bindingPackageVersion = require('@tursodatabase/sync-openharmony-arm/package.json').version
|
||||
if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
|
||||
throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
|
||||
}
|
||||
return binding
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`))
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
|
||||
}
|
||||
}
|
||||
|
||||
nativeBinding = requireNative()
|
||||
|
||||
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
|
||||
try {
|
||||
nativeBinding = require('./sync.wasi.cjs')
|
||||
} catch (err) {
|
||||
if (process.env.NAPI_RS_FORCE_WASI) {
|
||||
loadErrors.push(err)
|
||||
}
|
||||
}
|
||||
if (!nativeBinding) {
|
||||
try {
|
||||
nativeBinding = require('@tursodatabase/sync-wasm32-wasi')
|
||||
} catch (err) {
|
||||
if (process.env.NAPI_RS_FORCE_WASI) {
|
||||
loadErrors.push(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!nativeBinding) {
|
||||
if (loadErrors.length > 0) {
|
||||
throw new Error(
|
||||
`Cannot find native binding. ` +
|
||||
`npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
|
||||
'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
|
||||
{ cause: loadErrors }
|
||||
)
|
||||
}
|
||||
throw new Error(`Failed to load native binding`)
|
||||
}
|
||||
|
||||
const { Database, Statement, GeneratorHolder, JsDataCompletion, JsProtocolIo, JsProtocolRequestBytes, SyncEngine, DatabaseChangeTypeJs, SyncEngineProtocolVersion } = nativeBinding
|
||||
export { Database }
|
||||
export { Statement }
|
||||
export { GeneratorHolder }
|
||||
export { JsDataCompletion }
|
||||
export { JsProtocolIo }
|
||||
export { JsProtocolRequestBytes }
|
||||
export { SyncEngine }
|
||||
export { DatabaseChangeTypeJs }
|
||||
export { SyncEngineProtocolVersion }
|
||||
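The generated loader above resolves the native binding by platform and architecture: it first tries a local `./sync.<platform>-<arch>.node` artifact, then the matching `@tursodatabase/sync-*` package (with a version check that only runs when `NAPI_RS_ENFORCE_VERSION_CHECK` is set), and finally falls back to the WASI build when nothing native loads or `NAPI_RS_FORCE_WASI` is set. A minimal sketch of forcing the WASI path in a fresh process, assuming the WASI artifacts are actually installed (illustration only, not part of the commit):

    // force-wasi.mts (hypothetical): opt into the WASI fallback before the loader runs
    process.env.NAPI_RS_FORCE_WASI = '1';
    const { connect } = await import('@tursodatabase/sync');
    console.log(typeof connect); // 'function' if sync.wasi.cjs (or the wasm32-wasi package) loaded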
53
bindings/javascript/sync/packages/native/package.json
Normal file
@@ -0,0 +1,53 @@
|
||||
{
|
||||
"name": "@tursodatabase/sync",
|
||||
"version": "0.1.5",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/tursodatabase/turso"
|
||||
},
|
||||
"license": "MIT",
|
||||
"module": "./dist/promise.js",
|
||||
"main": "./dist/promise.js",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": "./dist/promise.js",
|
||||
"./compat": "./dist/compat.js"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"dist/**",
|
||||
"README.md"
|
||||
],
|
||||
"packageManager": "yarn@4.9.2",
|
||||
"devDependencies": {
|
||||
"@napi-rs/cli": "^3.1.5",
|
||||
"@types/node": "^24.3.1",
|
||||
"typescript": "^5.9.2",
|
||||
"vitest": "^3.2.4"
|
||||
},
|
||||
"scripts": {
|
||||
"napi-build": "napi build --platform --release --esm --manifest-path ../../Cargo.toml --output-dir .",
|
||||
"napi-dirs": "napi create-npm-dirs",
|
||||
"napi-artifacts": "napi artifacts --output-dir .",
|
||||
"tsc-build": "npm exec tsc",
|
||||
"build": "npm run napi-build && npm run tsc-build",
|
||||
"test": "vitest --run",
|
||||
"prepublishOnly": "npm run napi-dirs && npm run napi-artifacts && napi prepublish -t npm"
|
||||
},
|
||||
"napi": {
|
||||
"binaryName": "sync",
|
||||
"targets": [
|
||||
"x86_64-unknown-linux-gnu",
|
||||
"x86_64-pc-windows-msvc",
|
||||
"universal-apple-darwin",
|
||||
"aarch64-unknown-linux-gnu"
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@tursodatabase/database-common": "^0.1.5",
|
||||
"@tursodatabase/sync-common": "^0.1.5"
|
||||
},
|
||||
"imports": {
|
||||
"#index": "./index.js"
|
||||
}
|
||||
}
|
||||
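This manifest ships the Node package as ESM only ("type": "module"), pointing both `main` and the default export at the promise API in `dist/promise.js`, exposing a second entry point at `./compat`, and declaring the napi targets used to build the per-platform artifacts. A small consumer sketch, assuming the package is installed and a sync server URL is available (names other than the package itself are placeholders):

    // app.ts (sketch)
    import { connect } from '@tursodatabase/sync';

    const db = await connect({ path: 'local.db', url: process.env.TURSO_DB_URL });
    await db.exec('CREATE TABLE IF NOT EXISTS notes(id INTEGER PRIMARY KEY, body TEXT)');
    await db.push();  // send local writes to the remote
    await db.close();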
288
bindings/javascript/sync/packages/native/promise.test.ts
Normal file
@@ -0,0 +1,288 @@
|
||||
import { unlinkSync } from "node:fs";
|
||||
import { expect, test } from 'vitest'
|
||||
import { connect, DatabaseRowMutation, DatabaseRowTransformResult } from './promise.js'
|
||||
|
||||
const localeCompare = (a, b) => a.x.localeCompare(b.x);
|
||||
|
||||
test('select-after-push', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
|
||||
await db.exec("DELETE FROM t");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("INSERT INTO t VALUES (1), (2), (3)");
|
||||
await db.push();
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
const rows = await db.prepare('SELECT * FROM t').all();
|
||||
expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }])
|
||||
}
|
||||
})
|
||||
|
||||
test('select-without-push', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
|
||||
await db.exec("DELETE FROM t");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("INSERT INTO t VALUES (1), (2), (3)");
|
||||
}
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
const rows = await db.prepare('SELECT * FROM t').all();
|
||||
expect(rows).toEqual([])
|
||||
}
|
||||
})
|
||||
|
||||
test('merge-non-overlapping-keys', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2')");
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO q VALUES ('k3', 'value3'), ('k4', 'value4'), ('k5', 'value5')");
|
||||
|
||||
await Promise.all([db1.push(), db2.push()]);
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM q').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value3' }, { x: 'k4', y: 'value4' }, { x: 'k5', y: 'value5' }];
|
||||
expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
})
|
||||
|
||||
test('last-push-wins', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");
|
||||
|
||||
await db2.push();
|
||||
await db1.push();
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM q').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value5' }, { x: 'k4', y: 'value4' }];
|
||||
expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
|
||||
})
|
||||
|
||||
test('last-push-wins-with-delete', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
|
||||
await db1.exec("DELETE FROM q")
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");
|
||||
|
||||
await db2.push();
|
||||
await db1.push();
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM q').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k3', y: 'value5' }];
|
||||
expect(rows1).toEqual(expected)
|
||||
expect(rows2).toEqual(expected)
|
||||
})
|
||||
|
||||
test('constraint-conflict', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS u(x TEXT PRIMARY KEY, y UNIQUE)");
|
||||
await db.exec("DELETE FROM u");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec("INSERT INTO u VALUES ('k1', 'value1')");
|
||||
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec("INSERT INTO u VALUES ('k2', 'value1')");
|
||||
|
||||
await db1.push();
|
||||
await expect(async () => await db2.push()).rejects.toThrow('SQLite error: UNIQUE constraint failed: u.y');
|
||||
})
|
||||
|
||||
test('checkpoint', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
await db1.exec(`INSERT INTO q VALUES ('k${i}', 'v${i}')`);
|
||||
}
|
||||
expect((await db1.stats()).mainWal).toBeGreaterThan(4096 * 1000);
|
||||
await db1.checkpoint();
|
||||
expect((await db1.stats()).mainWal).toBe(0);
|
||||
let revertWal = (await db1.stats()).revertWal;
|
||||
expect(revertWal).toBeLessThan(4096 * 1000 / 100);
|
||||
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
await db1.exec(`UPDATE q SET y = 'u${i}' WHERE x = 'k${i}'`);
|
||||
}
|
||||
await db1.checkpoint();
|
||||
expect((await db1.stats()).revertWal).toBe(revertWal);
|
||||
})
|
||||
|
||||
test('persistence', async () => {
|
||||
{
|
||||
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
|
||||
await db.exec("DELETE FROM q");
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const path = `test-${(Math.random() * 10000) | 0}.db`;
|
||||
try {
|
||||
{
|
||||
const db1 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
await db1.exec(`INSERT INTO q VALUES ('k1', 'v1')`);
|
||||
await db1.exec(`INSERT INTO q VALUES ('k2', 'v2')`);
|
||||
await db1.close();
|
||||
}
|
||||
|
||||
{
|
||||
const db2 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
await db2.exec(`INSERT INTO q VALUES ('k3', 'v3')`);
|
||||
await db2.exec(`INSERT INTO q VALUES ('k4', 'v4')`);
|
||||
const rows = await db2.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
|
||||
expect(rows).toEqual(expected)
|
||||
await db2.close();
|
||||
}
|
||||
|
||||
{
|
||||
const db3 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
await db3.push();
|
||||
await db3.close();
|
||||
}
|
||||
|
||||
{
|
||||
const db4 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
|
||||
const rows = await db4.prepare('SELECT * FROM q').all();
|
||||
const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
|
||||
expect(rows).toEqual(expected)
|
||||
await db4.close();
|
||||
}
|
||||
} finally {
|
||||
unlinkSync(path);
|
||||
unlinkSync(`${path}-wal`);
|
||||
unlinkSync(`${path}-info`);
|
||||
unlinkSync(`${path}-changes`);
|
||||
try { unlinkSync(`${path}-revert`) } catch (e) { }
|
||||
}
|
||||
})
|
||||
|
||||
test('transform', async () => {
|
||||
{
|
||||
const db = await connect({
|
||||
path: ':memory:',
|
||||
url: process.env.VITE_TURSO_DB_URL,
|
||||
});
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
|
||||
await db.exec("DELETE FROM counter");
|
||||
await db.exec("INSERT INTO counter VALUES ('1', 0)")
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const transform = (m: DatabaseRowMutation) => ({
|
||||
operation: 'rewrite',
|
||||
stmt: {
|
||||
sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
|
||||
values: [m.after.value - m.before.value, m.after.key]
|
||||
}
|
||||
} as DatabaseRowTransformResult);
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
|
||||
await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
|
||||
await Promise.all([db1.push(), db2.push()]);
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM counter').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM counter').all();
|
||||
expect(rows1).toEqual([{ key: '1', value: 2 }]);
|
||||
expect(rows2).toEqual([{ key: '1', value: 2 }]);
|
||||
})
|
||||
|
||||
test('transform-many', async () => {
|
||||
{
|
||||
const db = await connect({
|
||||
path: ':memory:',
|
||||
url: process.env.VITE_TURSO_DB_URL,
|
||||
});
|
||||
await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
|
||||
await db.exec("DELETE FROM counter");
|
||||
await db.exec("INSERT INTO counter VALUES ('1', 0)")
|
||||
await db.push();
|
||||
await db.close();
|
||||
}
|
||||
const transform = (m: DatabaseRowMutation) => ({
|
||||
operation: 'rewrite',
|
||||
stmt: {
|
||||
sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
|
||||
values: [m.after.value - m.before.value, m.after.key]
|
||||
}
|
||||
} as DatabaseRowTransformResult);
|
||||
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
|
||||
|
||||
for (let i = 0; i < 1002; i++) {
|
||||
await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
}
|
||||
for (let i = 0; i < 1001; i++) {
|
||||
await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
|
||||
}
|
||||
|
||||
let start = performance.now();
|
||||
await Promise.all([db1.push(), db2.push()]);
|
||||
console.info('push', performance.now() - start);
|
||||
|
||||
start = performance.now();
|
||||
await Promise.all([db1.pull(), db2.pull()]);
|
||||
console.info('pull', performance.now() - start);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM counter').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM counter').all();
|
||||
expect(rows1).toEqual([{ key: '1', value: 1001 + 1002 }]);
|
||||
expect(rows2).toEqual([{ key: '1', value: 1001 + 1002 }]);
|
||||
})
|
||||
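The two transform tests above exercise the row-level rewrite hook: every replicated mutation is handed to the `transform` callback, which may keep it, skip it, or rewrite it into an explicit statement, so concurrent counter updates merge additively instead of one push overwriting the other. The callback shape the tests use, pulled out for clarity (the tests import these types from `./promise.js`; the package entry re-exports them):

    import { DatabaseRowMutation, DatabaseRowTransformResult } from '@tursodatabase/sync';

    // Rewrite an UPDATE on counter(key, value) into a relative increment.
    const transform = (m: DatabaseRowMutation) => ({
      operation: 'rewrite',
      stmt: {
        sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
        values: [m.after.value - m.before.value, m.after.key],
      },
    } as DatabaseRowTransformResult);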
104
bindings/javascript/sync/packages/native/promise.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import { DatabasePromise, DatabaseOpts, NativeDatabase } from "@tursodatabase/database-common"
|
||||
import { ProtocolIo, run, SyncOpts, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult } from "@tursodatabase/sync-common";
|
||||
import { Database as NativeDB, SyncEngine } from "#index";
|
||||
import { promises } from "node:fs";
|
||||
|
||||
let NodeIO: ProtocolIo = {
|
||||
async read(path: string): Promise<Buffer | Uint8Array | null> {
|
||||
try {
|
||||
return await promises.readFile(path);
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
async write(path: string, data: Buffer | Uint8Array): Promise<void> {
|
||||
const unix = Math.floor(Date.now() / 1000);
|
||||
const nonce = Math.floor(Math.random() * 1000000000);
|
||||
const tmp = `${path}.tmp.${unix}.${nonce}`;
|
||||
await promises.writeFile(tmp, new Uint8Array(data));
|
||||
try {
|
||||
await promises.rename(tmp, path);
|
||||
} catch (err) {
|
||||
await promises.unlink(tmp);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function memoryIO(): ProtocolIo {
|
||||
let values = new Map();
|
||||
return {
|
||||
async read(path: string): Promise<Buffer | Uint8Array | null> {
|
||||
return values.get(path) ?? null;
|
||||
},
|
||||
async write(path: string, data: Buffer | Uint8Array): Promise<void> {
|
||||
values.set(path, data);
|
||||
}
|
||||
}
|
||||
};
|
||||
class Database extends DatabasePromise {
|
||||
runOpts: RunOpts;
|
||||
engine: any;
|
||||
io: ProtocolIo;
|
||||
constructor(db: NativeDatabase, io: ProtocolIo, runOpts: RunOpts, engine: any, opts: DatabaseOpts = {}) {
|
||||
super(db, opts)
|
||||
this.runOpts = runOpts;
|
||||
this.engine = engine;
|
||||
this.io = io;
|
||||
}
|
||||
async sync() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.sync());
|
||||
}
|
||||
async pull() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.pull());
|
||||
}
|
||||
async push() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.push());
|
||||
}
|
||||
async checkpoint() {
|
||||
await run(this.runOpts, this.io, this.engine, this.engine.checkpoint());
|
||||
}
|
||||
async stats(): Promise<{ operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }> {
|
||||
return (await run(this.runOpts, this.io, this.engine, this.engine.stats()));
|
||||
}
|
||||
override async close(): Promise<void> {
|
||||
this.engine.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new database connection asynchronously.
|
||||
*
|
||||
 * @param {SyncOpts} opts - Connection options: path, url, authToken, clientName, tablesIgnore, transform, tracing, encryptionKey.
|
||||
* @returns {Promise<Database>} - A promise that resolves to a Database instance.
|
||||
*/
|
||||
async function connect(opts: SyncOpts): Promise<Database> {
|
||||
const engine = new SyncEngine({
|
||||
path: opts.path,
|
||||
clientName: opts.clientName,
|
||||
tablesIgnore: opts.tablesIgnore,
|
||||
useTransform: opts.transform != null,
|
||||
tracing: opts.tracing,
|
||||
protocolVersion: 1
|
||||
});
|
||||
const runOpts: RunOpts = {
|
||||
url: opts.url,
|
||||
headers: {
|
||||
...(opts.authToken != null && { "Authorization": `Bearer ${opts.authToken}` }),
|
||||
...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
|
||||
},
|
||||
preemptionMs: 1,
|
||||
transform: opts.transform,
|
||||
};
|
||||
let io = opts.path == ':memory:' ? memoryIO() : NodeIO;
|
||||
await run(runOpts, io, engine, engine.init());
|
||||
|
||||
const nativeDb = engine.open();
|
||||
return new Database(nativeDb as any, io, runOpts, engine, {});
|
||||
}
|
||||
|
||||
export { connect, Database, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
|
||||
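`connect` wires everything together: it builds a `SyncEngine` for the given path, picks the in-memory `ProtocolIo` for `:memory:` databases and the Node filesystem IO (atomic write via temp file plus rename) otherwise, drives `engine.init()` through the shared `run` helper, then wraps the opened native handle so `sync`, `pull`, `push`, `checkpoint` and `stats` run through the same helper. A short usage sketch of the maintenance calls (the URL and token are placeholders):

    import { connect } from '@tursodatabase/sync';

    const db = await connect({ path: 'local.db', url: 'https://<your-db>.turso.io', authToken: '<token>' });
    await db.pull();                       // bring in remote changes
    await db.checkpoint();                 // fold the main WAL back into the database file
    const { mainWal, revertWal, operations } = await db.stats();
    console.log({ mainWal, revertWal, operations });
    await db.close();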
21
bindings/javascript/sync/packages/native/tsconfig.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"skipLibCheck": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"module": "nodenext",
|
||||
"target": "esnext",
|
||||
"outDir": "dist/",
|
||||
"lib": [
|
||||
"es2020"
|
||||
],
|
||||
"paths": {
|
||||
"#index": [
|
||||
"./index.d.ts"
|
||||
]
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"*"
|
||||
]
|
||||
}
|
||||
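The `paths` entry here is the compile-time half of the `#index` wiring: package.json's `imports` map resolves `#index` to the generated `./index.js` loader at runtime, while this mapping points the compiler at `./index.d.ts`, so the wrapper can pull in the native classes without a relative path, exactly as promise.ts already does:

    import { Database as NativeDB, SyncEngine } from "#index";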
102
bindings/javascript/sync/src/generator.rs
Normal file
@@ -0,0 +1,102 @@
|
||||
use napi::{bindgen_prelude::AsyncTask, Env, Task};
|
||||
use napi_derive::napi;
|
||||
use std::{
|
||||
future::Future,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use turso_sync_engine::types::ProtocolCommand;
|
||||
|
||||
pub const GENERATOR_RESUME_IO: u32 = 0;
|
||||
pub const GENERATOR_RESUME_DONE: u32 = 1;
|
||||
|
||||
pub trait Generator {
|
||||
fn resume(&mut self, error: Option<String>) -> napi::Result<GeneratorResponse>;
|
||||
}
|
||||
|
||||
impl<F: Future<Output = turso_sync_engine::Result<()>>> Generator
|
||||
for genawaiter::sync::Gen<ProtocolCommand, turso_sync_engine::Result<()>, F>
|
||||
{
|
||||
fn resume(&mut self, error: Option<String>) -> napi::Result<GeneratorResponse> {
|
||||
let result = match error {
|
||||
Some(err) => Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
|
||||
format!("JsProtocolIo error: {err}"),
|
||||
)),
|
||||
None => Ok(()),
|
||||
};
|
||||
match self.resume_with(result) {
|
||||
genawaiter::GeneratorState::Yielded(ProtocolCommand::IO) => Ok(GeneratorResponse::IO),
|
||||
genawaiter::GeneratorState::Complete(Ok(())) => Ok(GeneratorResponse::Done),
|
||||
genawaiter::GeneratorState::Complete(Err(err)) => Err(napi::Error::new(
|
||||
napi::Status::GenericFailure,
|
||||
format!("sync engine operation failed: {err}"),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[napi(discriminant = "type")]
|
||||
pub enum GeneratorResponse {
|
||||
IO,
|
||||
Done,
|
||||
SyncEngineStats {
|
||||
operations: i64,
|
||||
main_wal: i64,
|
||||
revert_wal: i64,
|
||||
last_pull_unix_time: i64,
|
||||
last_push_unix_time: Option<i64>,
|
||||
},
|
||||
}
|
||||
|
||||
#[napi]
|
||||
#[derive(Clone)]
|
||||
pub struct GeneratorHolder {
|
||||
pub(crate) generator: Arc<Mutex<dyn Generator>>,
|
||||
pub(crate) response: Arc<Mutex<Option<GeneratorResponse>>>,
|
||||
}
|
||||
|
||||
pub struct ResumeTask {
|
||||
holder: GeneratorHolder,
|
||||
error: Option<String>,
|
||||
}
|
||||
|
||||
unsafe impl Send for ResumeTask {}
|
||||
|
||||
impl Task for ResumeTask {
|
||||
type Output = GeneratorResponse;
|
||||
type JsValue = GeneratorResponse;
|
||||
|
||||
fn compute(&mut self) -> napi::Result<Self::Output> {
|
||||
resume_sync(&self.holder, self.error.take())
|
||||
}
|
||||
|
||||
fn resolve(&mut self, _: Env, output: Self::Output) -> napi::Result<Self::JsValue> {
|
||||
Ok(output)
|
||||
}
|
||||
}
|
||||
|
||||
fn resume_sync(holder: &GeneratorHolder, error: Option<String>) -> napi::Result<GeneratorResponse> {
|
||||
let result = holder.generator.lock().unwrap().resume(error)?;
|
||||
if let GeneratorResponse::Done = result {
|
||||
let response = holder.response.lock().unwrap().take();
|
||||
Ok(response.unwrap_or(GeneratorResponse::Done))
|
||||
} else {
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl GeneratorHolder {
|
||||
#[napi]
|
||||
pub fn resume_sync(&self, error: Option<String>) -> napi::Result<GeneratorResponse> {
|
||||
resume_sync(self, error)
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn resume_async(&self, error: Option<String>) -> napi::Result<AsyncTask<ResumeTask>> {
|
||||
Ok(AsyncTask::new(ResumeTask {
|
||||
holder: self.clone(),
|
||||
error,
|
||||
}))
|
||||
}
|
||||
}
|
||||
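`GeneratorHolder` wraps a suspended sync-engine coroutine: each `resume_sync`/`resume_async` call resumes it with an optional error string and yields either `IO` (the engine is waiting for the host to perform I/O) or `Done`, with a buffered payload such as `SyncEngineStats` substituted for `Done` when one was produced. A rough sketch of how the JS side presumably drives this; the real loop lives in `@tursodatabase/sync-common`, and the camelCase names assume napi-rs's default renaming:

    // Drive one sync-engine operation to completion (illustrative sketch, not the shipped helper).
    async function drive(engine: any, holder: any): Promise<any> {
      for (;;) {
        const step = await holder.resumeAsync(null);  // resume the Rust generator
        if (step.type !== 'IO') return step;          // 'Done' or a buffered payload like 'SyncEngineStats'
        await engine.ioLoopAsync();                   // let turso-core finish pending I/O
        // ...plus servicing engine.protocolIo() requests; see the sketch after js_protocol_io.rs below
      }
    }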
283
bindings/javascript/sync/src/js_protocol_io.rs
Normal file
@@ -0,0 +1,283 @@
|
||||
#![deny(clippy::all)]
|
||||
|
||||
use std::{
|
||||
collections::VecDeque,
|
||||
sync::{Arc, Mutex, MutexGuard},
|
||||
};
|
||||
|
||||
use napi::bindgen_prelude::*;
|
||||
use napi_derive::napi;
|
||||
use turso_sync_engine::{
|
||||
protocol_io::{DataCompletion, DataPollResult, ProtocolIO},
|
||||
types::{DatabaseRowTransformResult, DatabaseStatementReplay},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
core_change_type_to_js, core_values_map_to_js, js_value_to_core, DatabaseRowMutationJs,
|
||||
DatabaseRowTransformResultJs,
|
||||
};
|
||||
|
||||
#[napi]
|
||||
pub enum JsProtocolRequest {
|
||||
Http {
|
||||
method: String,
|
||||
path: String,
|
||||
body: Option<Vec<u8>>,
|
||||
headers: Vec<(String, String)>,
|
||||
},
|
||||
FullRead {
|
||||
path: String,
|
||||
},
|
||||
FullWrite {
|
||||
path: String,
|
||||
content: Vec<u8>,
|
||||
},
|
||||
Transform {
|
||||
mutations: Vec<DatabaseRowMutationJs>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[napi]
|
||||
pub struct JsDataCompletion(Arc<Mutex<JsDataCompletionInner>>);
|
||||
|
||||
pub struct JsBytesPollResult(Buffer);
|
||||
|
||||
impl DataPollResult<u8> for JsBytesPollResult {
|
||||
fn data(&self) -> &[u8] {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
pub struct JsTransformPollResult(Vec<DatabaseRowTransformResult>);
|
||||
|
||||
impl DataPollResult<DatabaseRowTransformResult> for JsTransformPollResult {
|
||||
fn data(&self) -> &[DatabaseRowTransformResult] {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
struct JsDataCompletionInner {
|
||||
status: Option<u16>,
|
||||
chunks: VecDeque<Buffer>,
|
||||
transformed: VecDeque<DatabaseRowTransformResult>,
|
||||
finished: bool,
|
||||
err: Option<String>,
|
||||
}
|
||||
|
||||
impl JsDataCompletion {
|
||||
fn inner(&self) -> turso_sync_engine::Result<MutexGuard<JsDataCompletionInner>> {
|
||||
let inner = self.0.lock().unwrap();
|
||||
if let Some(err) = &inner.err {
|
||||
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
|
||||
err.clone(),
|
||||
));
|
||||
}
|
||||
Ok(inner)
|
||||
}
|
||||
}
|
||||
|
||||
impl DataCompletion<u8> for JsDataCompletion {
|
||||
type DataPollResult = JsBytesPollResult;
|
||||
|
||||
fn status(&self) -> turso_sync_engine::Result<Option<u16>> {
|
||||
let inner = self.inner()?;
|
||||
Ok(inner.status)
|
||||
}
|
||||
|
||||
fn poll_data(&self) -> turso_sync_engine::Result<Option<Self::DataPollResult>> {
|
||||
let mut inner = self.inner()?;
|
||||
let chunk = inner.chunks.pop_front();
|
||||
Ok(chunk.map(JsBytesPollResult))
|
||||
}
|
||||
|
||||
fn is_done(&self) -> turso_sync_engine::Result<bool> {
|
||||
let inner = self.inner()?;
|
||||
Ok(inner.finished)
|
||||
}
|
||||
}
|
||||
|
||||
impl DataCompletion<DatabaseRowTransformResult> for JsDataCompletion {
|
||||
type DataPollResult = JsTransformPollResult;
|
||||
|
||||
fn status(&self) -> turso_sync_engine::Result<Option<u16>> {
|
||||
let inner = self.inner()?;
|
||||
Ok(inner.status)
|
||||
}
|
||||
|
||||
fn poll_data(&self) -> turso_sync_engine::Result<Option<Self::DataPollResult>> {
|
||||
let mut inner = self.inner()?;
|
||||
let chunk = inner.transformed.drain(..).collect::<Vec<_>>();
|
||||
if chunk.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(JsTransformPollResult(chunk)))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_done(&self) -> turso_sync_engine::Result<bool> {
|
||||
let inner = self.inner()?;
|
||||
Ok(inner.finished)
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl JsDataCompletion {
|
||||
#[napi]
|
||||
pub fn poison(&self, err: String) {
|
||||
let mut completion = self.0.lock().unwrap();
|
||||
completion.err = Some(err);
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn status(&self, value: u32) {
|
||||
let mut completion = self.0.lock().unwrap();
|
||||
completion.status = Some(value as u16);
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn push_buffer(&self, value: Buffer) {
|
||||
let mut completion = self.0.lock().unwrap();
|
||||
completion.chunks.push_back(value);
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn push_transform(&self, values: Vec<DatabaseRowTransformResultJs>) {
|
||||
let mut completion = self.0.lock().unwrap();
|
||||
for value in values {
|
||||
completion.transformed.push_back(match value {
|
||||
DatabaseRowTransformResultJs::Keep => DatabaseRowTransformResult::Keep,
|
||||
DatabaseRowTransformResultJs::Skip => DatabaseRowTransformResult::Skip,
|
||||
DatabaseRowTransformResultJs::Rewrite { stmt } => {
|
||||
DatabaseRowTransformResult::Rewrite(DatabaseStatementReplay {
|
||||
sql: stmt.sql,
|
||||
values: stmt.values.into_iter().map(js_value_to_core).collect(),
|
||||
})
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn done(&self) {
|
||||
let mut completion = self.0.lock().unwrap();
|
||||
completion.finished = true;
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub struct JsProtocolRequestBytes {
|
||||
request: Arc<Mutex<Option<JsProtocolRequest>>>,
|
||||
completion: JsDataCompletion,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl JsProtocolRequestBytes {
|
||||
#[napi]
|
||||
pub fn request(&self) -> JsProtocolRequest {
|
||||
let mut request = self.request.lock().unwrap();
|
||||
request.take().unwrap()
|
||||
}
|
||||
#[napi]
|
||||
pub fn completion(&self) -> JsDataCompletion {
|
||||
self.completion.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl ProtocolIO for JsProtocolIo {
|
||||
type DataCompletionBytes = JsDataCompletion;
|
||||
type DataCompletionTransform = JsDataCompletion;
|
||||
|
||||
fn http(
|
||||
&self,
|
||||
method: &str,
|
||||
path: &str,
|
||||
body: Option<Vec<u8>>,
|
||||
headers: &[(&str, &str)],
|
||||
) -> turso_sync_engine::Result<JsDataCompletion> {
|
||||
Ok(self.add_request(JsProtocolRequest::Http {
|
||||
method: method.to_string(),
|
||||
path: path.to_string(),
|
||||
body,
|
||||
headers: headers
|
||||
.iter()
|
||||
.map(|x| (x.0.to_string(), x.1.to_string()))
|
||||
.collect(),
|
||||
}))
|
||||
}
|
||||
|
||||
fn full_read(&self, path: &str) -> turso_sync_engine::Result<Self::DataCompletionBytes> {
|
||||
Ok(self.add_request(JsProtocolRequest::FullRead {
|
||||
path: path.to_string(),
|
||||
}))
|
||||
}
|
||||
|
||||
fn full_write(
|
||||
&self,
|
||||
path: &str,
|
||||
content: Vec<u8>,
|
||||
) -> turso_sync_engine::Result<Self::DataCompletionBytes> {
|
||||
Ok(self.add_request(JsProtocolRequest::FullWrite {
|
||||
path: path.to_string(),
|
||||
content,
|
||||
}))
|
||||
}
|
||||
|
||||
fn transform(
|
||||
&self,
|
||||
mutations: Vec<turso_sync_engine::types::DatabaseRowMutation>,
|
||||
) -> turso_sync_engine::Result<Self::DataCompletionTransform> {
|
||||
Ok(self.add_request(JsProtocolRequest::Transform {
|
||||
mutations: mutations
|
||||
.into_iter()
|
||||
.map(|mutation| DatabaseRowMutationJs {
|
||||
change_time: mutation.change_time as i64,
|
||||
table_name: mutation.table_name,
|
||||
id: mutation.id,
|
||||
change_type: core_change_type_to_js(mutation.change_type),
|
||||
before: mutation.before.map(core_values_map_to_js),
|
||||
after: mutation.after.map(core_values_map_to_js),
|
||||
updates: mutation.updates.map(core_values_map_to_js),
|
||||
})
|
||||
.collect(),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub struct JsProtocolIo {
|
||||
requests: Mutex<Vec<JsProtocolRequestBytes>>,
|
||||
}
|
||||
|
||||
impl Default for JsProtocolIo {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
requests: Mutex::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl JsProtocolIo {
|
||||
#[napi]
|
||||
pub fn take_request(&self) -> Option<JsProtocolRequestBytes> {
|
||||
self.requests.lock().unwrap().pop()
|
||||
}
|
||||
|
||||
fn add_request(&self, request: JsProtocolRequest) -> JsDataCompletion {
|
||||
let completion = JsDataCompletionInner {
|
||||
chunks: VecDeque::new(),
|
||||
transformed: VecDeque::new(),
|
||||
finished: false,
|
||||
err: None,
|
||||
status: None,
|
||||
};
|
||||
let completion = JsDataCompletion(Arc::new(Mutex::new(completion)));
|
||||
|
||||
let mut requests = self.requests.lock().unwrap();
|
||||
requests.push(JsProtocolRequestBytes {
|
||||
request: Arc::new(Mutex::new(Some(request))),
|
||||
completion: completion.clone(),
|
||||
});
|
||||
completion
|
||||
}
|
||||
}
|
||||
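`JsProtocolIo` is the bridge in the other direction: the Rust engine queues `JsProtocolRequestBytes` entries (HTTP calls, whole-file reads and writes, or transform batches), and the JS host pops them via `take_request`, does the work, and reports back through the paired `JsDataCompletion` (`status`, `push_buffer`/`push_transform`, then `done`, or `poison` on failure). A hedged sketch of servicing the HTTP case; the shipped driver lives in `@tursodatabase/sync-common`, and the field and method names assume napi-rs's default camelCase and enum projection:

    // Service one pending protocol request, if any (illustrative sketch).
    async function serviceOne(engine: any, baseUrl: string, headers: Record<string, string>) {
      const pending = engine.protocolIo();          // take_request() on the Rust side
      if (pending == null) return;
      const request = pending.request();
      const completion = pending.completion();
      try {
        if (request.type === 'Http') {              // variant tag name is an assumption
          const res = await fetch(baseUrl + request.path, {
            method: request.method,
            headers,
            body: request.body ? new Uint8Array(request.body) : undefined,
          });
          completion.status(res.status);
          completion.pushBuffer(Buffer.from(await res.arrayBuffer()));
        }
        // FullRead / FullWrite go through the ProtocolIo read/write hooks, Transform through the user callback.
        completion.done();
      } catch (err) {
        completion.poison(String(err));
      }
    }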
413
bindings/javascript/sync/src/lib.rs
Normal file
@@ -0,0 +1,413 @@
|
||||
#![allow(clippy::await_holding_lock)]
|
||||
#![allow(clippy::type_complexity)]
|
||||
|
||||
pub mod generator;
|
||||
pub mod js_protocol_io;
|
||||
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, Mutex, OnceLock, RwLock, RwLockReadGuard, RwLockWriteGuard},
|
||||
};
|
||||
|
||||
use napi::bindgen_prelude::{AsyncTask, Either5, Null};
|
||||
use napi_derive::napi;
|
||||
use tracing_subscriber::{filter::LevelFilter, fmt::format::FmtSpan};
|
||||
use turso_node::IoLoopTask;
|
||||
use turso_sync_engine::{
|
||||
database_sync_engine::{DatabaseSyncEngine, DatabaseSyncEngineOpts},
|
||||
types::{Coro, DatabaseChangeType, DatabaseSyncEngineProtocolVersion},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
generator::{GeneratorHolder, GeneratorResponse},
|
||||
js_protocol_io::{JsProtocolIo, JsProtocolRequestBytes},
|
||||
};
|
||||
|
||||
#[napi(object)]
|
||||
pub struct DatabaseOpts {
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub struct SyncEngine {
|
||||
path: String,
|
||||
client_name: String,
|
||||
wal_pull_batch_size: u32,
|
||||
protocol_version: DatabaseSyncEngineProtocolVersion,
|
||||
tables_ignore: Vec<String>,
|
||||
use_transform: bool,
|
||||
io: Option<Arc<dyn turso_core::IO>>,
|
||||
protocol: Option<Arc<JsProtocolIo>>,
|
||||
sync_engine: Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>>,
|
||||
opened: Arc<Mutex<Option<turso_node::Database>>>,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub enum DatabaseChangeTypeJs {
|
||||
Insert,
|
||||
Update,
|
||||
Delete,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub enum SyncEngineProtocolVersion {
|
||||
Legacy,
|
||||
V1,
|
||||
}
|
||||
|
||||
fn core_change_type_to_js(value: DatabaseChangeType) -> DatabaseChangeTypeJs {
|
||||
match value {
|
||||
DatabaseChangeType::Delete => DatabaseChangeTypeJs::Delete,
|
||||
DatabaseChangeType::Update => DatabaseChangeTypeJs::Update,
|
||||
DatabaseChangeType::Insert => DatabaseChangeTypeJs::Insert,
|
||||
}
|
||||
}
|
||||
fn js_value_to_core(value: Either5<Null, i64, f64, String, Vec<u8>>) -> turso_core::Value {
|
||||
match value {
|
||||
Either5::A(_) => turso_core::Value::Null,
|
||||
Either5::B(value) => turso_core::Value::Integer(value),
|
||||
Either5::C(value) => turso_core::Value::Float(value),
|
||||
Either5::D(value) => turso_core::Value::Text(turso_core::types::Text::new(&value)),
|
||||
Either5::E(value) => turso_core::Value::Blob(value),
|
||||
}
|
||||
}
|
||||
fn core_value_to_js(value: turso_core::Value) -> Either5<Null, i64, f64, String, Vec<u8>> {
|
||||
match value {
|
||||
turso_core::Value::Null => Either5::<Null, i64, f64, String, Vec<u8>>::A(Null),
|
||||
turso_core::Value::Integer(value) => Either5::<Null, i64, f64, String, Vec<u8>>::B(value),
|
||||
turso_core::Value::Float(value) => Either5::<Null, i64, f64, String, Vec<u8>>::C(value),
|
||||
turso_core::Value::Text(value) => {
|
||||
Either5::<Null, i64, f64, String, Vec<u8>>::D(value.as_str().to_string())
|
||||
}
|
||||
turso_core::Value::Blob(value) => Either5::<Null, i64, f64, String, Vec<u8>>::E(value),
|
||||
}
|
||||
}
|
||||
fn core_values_map_to_js(
|
||||
value: HashMap<String, turso_core::Value>,
|
||||
) -> HashMap<String, Either5<Null, i64, f64, String, Vec<u8>>> {
|
||||
let mut result = HashMap::new();
|
||||
for (key, value) in value {
|
||||
result.insert(key, core_value_to_js(value));
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
#[napi(object)]
|
||||
pub struct DatabaseRowMutationJs {
|
||||
pub change_time: i64,
|
||||
pub table_name: String,
|
||||
pub id: i64,
|
||||
pub change_type: DatabaseChangeTypeJs,
|
||||
pub before: Option<HashMap<String, Either5<Null, i64, f64, String, Vec<u8>>>>,
|
||||
pub after: Option<HashMap<String, Either5<Null, i64, f64, String, Vec<u8>>>>,
|
||||
pub updates: Option<HashMap<String, Either5<Null, i64, f64, String, Vec<u8>>>>,
|
||||
}
|
||||
|
||||
#[napi(object)]
|
||||
#[derive(Debug)]
|
||||
pub struct DatabaseRowStatementJs {
|
||||
pub sql: String,
|
||||
pub values: Vec<Either5<Null, i64, f64, String, Vec<u8>>>,
|
||||
}
|
||||
|
||||
#[napi(discriminant = "type")]
|
||||
#[derive(Debug)]
|
||||
pub enum DatabaseRowTransformResultJs {
|
||||
Keep,
|
||||
Skip,
|
||||
Rewrite { stmt: DatabaseRowStatementJs },
|
||||
}
|
||||
|
||||
#[napi(object, object_to_js = false)]
|
||||
pub struct SyncEngineOpts {
|
||||
pub path: String,
|
||||
pub client_name: Option<String>,
|
||||
pub wal_pull_batch_size: Option<u32>,
|
||||
pub tracing: Option<String>,
|
||||
pub tables_ignore: Option<Vec<String>>,
|
||||
pub use_transform: bool,
|
||||
pub protocol_version: Option<SyncEngineProtocolVersion>,
|
||||
}
|
||||
|
||||
static TRACING_INIT: OnceLock<()> = OnceLock::new();
|
||||
pub fn init_tracing(level_filter: LevelFilter) {
|
||||
TRACING_INIT.get_or_init(|| {
|
||||
tracing_subscriber::fmt()
|
||||
.with_ansi(false)
|
||||
.with_thread_ids(true)
|
||||
.with_span_events(FmtSpan::ACTIVE)
|
||||
.with_max_level(level_filter)
|
||||
.init();
|
||||
});
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl SyncEngine {
|
||||
#[napi(constructor)]
|
||||
pub fn new(opts: SyncEngineOpts) -> napi::Result<Self> {
|
||||
// helpful for local debugging
|
||||
match opts.tracing.as_deref() {
|
||||
Some("info") => init_tracing(LevelFilter::INFO),
|
||||
Some("debug") => init_tracing(LevelFilter::DEBUG),
|
||||
Some("trace") => init_tracing(LevelFilter::TRACE),
|
||||
_ => {}
|
||||
}
|
||||
let is_memory = opts.path == ":memory:";
|
||||
let io: Arc<dyn turso_core::IO> = if is_memory {
|
||||
Arc::new(turso_core::MemoryIO::new())
|
||||
} else {
|
||||
#[cfg(not(feature = "browser"))]
|
||||
{
|
||||
Arc::new(turso_core::PlatformIO::new().map_err(|e| {
|
||||
napi::Error::new(
|
||||
napi::Status::GenericFailure,
|
||||
format!("Failed to create IO: {e}"),
|
||||
)
|
||||
})?)
|
||||
}
|
||||
#[cfg(feature = "browser")]
|
||||
{
|
||||
Arc::new(turso_node::browser::Opfs::new()?)
|
||||
}
|
||||
};
|
||||
Ok(SyncEngine {
|
||||
path: opts.path,
|
||||
client_name: opts.client_name.unwrap_or("turso-sync-js".to_string()),
|
||||
wal_pull_batch_size: opts.wal_pull_batch_size.unwrap_or(100),
|
||||
tables_ignore: opts.tables_ignore.unwrap_or_default(),
|
||||
use_transform: opts.use_transform,
|
||||
#[allow(clippy::arc_with_non_send_sync)]
|
||||
sync_engine: Arc::new(RwLock::new(None)),
|
||||
io: Some(io),
|
||||
protocol: Some(Arc::new(JsProtocolIo::default())),
|
||||
#[allow(clippy::arc_with_non_send_sync)]
|
||||
opened: Arc::new(Mutex::new(None)),
|
||||
protocol_version: match opts.protocol_version {
|
||||
Some(SyncEngineProtocolVersion::Legacy) | None => {
|
||||
DatabaseSyncEngineProtocolVersion::Legacy
|
||||
}
|
||||
_ => DatabaseSyncEngineProtocolVersion::V1,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn init(&mut self) -> napi::Result<GeneratorHolder> {
|
||||
let opts = DatabaseSyncEngineOpts {
|
||||
client_name: self.client_name.clone(),
|
||||
wal_pull_batch_size: self.wal_pull_batch_size as u64,
|
||||
tables_ignore: self.tables_ignore.clone(),
|
||||
use_transform: self.use_transform,
|
||||
protocol_version_hint: self.protocol_version,
|
||||
};
|
||||
|
||||
let io = self.io()?;
|
||||
let protocol = self.protocol()?;
|
||||
let sync_engine = self.sync_engine.clone();
|
||||
let opened = self.opened.clone();
|
||||
let path = self.path.clone();
|
||||
let generator = genawaiter::sync::Gen::new(|coro| async move {
|
||||
let coro = Coro::new((), coro);
|
||||
let initialized =
|
||||
DatabaseSyncEngine::new(&coro, io.clone(), protocol, &path, opts).await?;
|
||||
let connection = initialized.connect_rw(&coro).await?;
|
||||
let db = turso_node::Database::create(None, io.clone(), connection, path);
|
||||
|
||||
*sync_engine.write().unwrap() = Some(initialized);
|
||||
*opened.lock().unwrap() = Some(db);
|
||||
Ok(())
|
||||
});
|
||||
Ok(GeneratorHolder {
|
||||
generator: Arc::new(Mutex::new(generator)),
|
||||
response: Arc::new(Mutex::new(None)),
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn io_loop_sync(&self) -> napi::Result<()> {
|
||||
self.io()?.step().map_err(|e| {
|
||||
napi::Error::new(napi::Status::GenericFailure, format!("IO error: {e}"))
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Runs the I/O loop asynchronously, returning a Promise.
|
||||
#[napi(ts_return_type = "Promise<void>")]
|
||||
pub fn io_loop_async(&self) -> napi::Result<AsyncTask<IoLoopTask>> {
|
||||
let io = self.io()?;
|
||||
Ok(AsyncTask::new(IoLoopTask { io }))
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn protocol_io(&self) -> napi::Result<Option<JsProtocolRequestBytes>> {
|
||||
Ok(self.protocol()?.take_request())
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn sync(&self) -> GeneratorHolder {
|
||||
self.run(async move |coro, sync_engine| {
|
||||
let mut sync_engine = try_write(sync_engine)?;
|
||||
let sync_engine = try_unwrap_mut(&mut sync_engine)?;
|
||||
sync_engine.sync(coro).await?;
|
||||
Ok(None)
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn push(&self) -> GeneratorHolder {
|
||||
self.run(async move |coro, sync_engine| {
|
||||
let sync_engine = try_read(sync_engine)?;
|
||||
let sync_engine = try_unwrap(&sync_engine)?;
|
||||
sync_engine.push_changes_to_remote(coro).await?;
|
||||
Ok(None)
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn stats(&self) -> GeneratorHolder {
|
||||
self.run(async move |coro, sync_engine| {
|
||||
let sync_engine = try_read(sync_engine)?;
|
||||
let sync_engine = try_unwrap(&sync_engine)?;
|
||||
let changes = sync_engine.stats(coro).await?;
|
||||
Ok(Some(GeneratorResponse::SyncEngineStats {
|
||||
operations: changes.cdc_operations,
|
||||
main_wal: changes.main_wal_size as i64,
|
||||
revert_wal: changes.revert_wal_size as i64,
|
||||
last_pull_unix_time: changes.last_pull_unix_time,
|
||||
last_push_unix_time: changes.last_push_unix_time,
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn pull(&self) -> GeneratorHolder {
|
||||
self.run(async move |coro, sync_engine| {
|
||||
let mut sync_engine = try_write(sync_engine)?;
|
||||
let sync_engine = try_unwrap_mut(&mut sync_engine)?;
|
||||
sync_engine.pull_changes_from_remote(coro).await?;
|
||||
Ok(None)
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn checkpoint(&self) -> GeneratorHolder {
|
||||
self.run(async move |coro, sync_engine| {
|
||||
let mut sync_engine = try_write(sync_engine)?;
|
||||
let sync_engine = try_unwrap_mut(&mut sync_engine)?;
|
||||
sync_engine.checkpoint(coro).await?;
|
||||
Ok(None)
|
||||
})
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn open(&self) -> napi::Result<turso_node::Database> {
|
||||
let opened = self.opened.lock().unwrap();
|
||||
let Some(opened) = opened.as_ref() else {
|
||||
return Err(napi::Error::new(
|
||||
napi::Status::GenericFailure,
|
||||
"sync_engine must be initialized".to_string(),
|
||||
));
|
||||
};
|
||||
Ok(opened.clone())
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn close(&mut self) {
|
||||
let _ = self.sync_engine.write().unwrap().take();
|
||||
let _ = self.opened.lock().unwrap().take().unwrap();
|
||||
let _ = self.io.take();
|
||||
let _ = self.protocol.take();
|
||||
}
|
||||
|
||||
fn io(&self) -> napi::Result<Arc<dyn turso_core::IO>> {
|
||||
if self.io.is_none() {
|
||||
return Err(napi::Error::new(
|
||||
napi::Status::GenericFailure,
|
||||
"sync engine was closed",
|
||||
));
|
||||
}
|
||||
Ok(self.io.as_ref().unwrap().clone())
|
||||
}
|
||||
fn protocol(&self) -> napi::Result<Arc<JsProtocolIo>> {
|
||||
if self.protocol.is_none() {
|
||||
return Err(napi::Error::new(
|
||||
napi::Status::GenericFailure,
|
||||
"sync engine was closed",
|
||||
));
|
||||
}
|
||||
Ok(self.protocol.as_ref().unwrap().clone())
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
f: impl AsyncFnOnce(
|
||||
&Coro<()>,
|
||||
&Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>>,
|
||||
) -> turso_sync_engine::Result<Option<GeneratorResponse>>
|
||||
+ 'static,
|
||||
) -> GeneratorHolder {
|
||||
let response = Arc::new(Mutex::new(None));
|
||||
let sync_engine = self.sync_engine.clone();
|
||||
#[allow(clippy::await_holding_lock)]
|
||||
let generator = genawaiter::sync::Gen::new({
|
||||
let response = response.clone();
|
||||
|coro| async move {
|
||||
let coro = Coro::new((), coro);
|
||||
*response.lock().unwrap() = f(&coro, &sync_engine).await?;
|
||||
Ok(())
|
||||
}
|
||||
});
|
||||
GeneratorHolder {
|
||||
generator: Arc::new(Mutex::new(generator)),
|
||||
response,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn try_read(
|
||||
sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>,
|
||||
) -> turso_sync_engine::Result<RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>> {
|
||||
let Ok(sync_engine) = sync_engine.try_read() else {
|
||||
let nasty_error = "sync_engine is busy".to_string();
|
||||
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
|
||||
nasty_error,
|
||||
));
|
||||
};
|
||||
Ok(sync_engine)
|
||||
}
|
||||
|
||||
fn try_write(
|
||||
sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>,
|
||||
) -> turso_sync_engine::Result<RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>> {
|
||||
let Ok(sync_engine) = sync_engine.try_write() else {
|
||||
let nasty_error = "sync_engine is busy".to_string();
|
||||
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
|
||||
nasty_error,
|
||||
));
|
||||
};
|
||||
Ok(sync_engine)
|
||||
}
|
||||
|
||||
fn try_unwrap<'a>(
|
||||
sync_engine: &'a RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>,
|
||||
) -> turso_sync_engine::Result<&'a DatabaseSyncEngine<JsProtocolIo>> {
|
||||
let Some(sync_engine) = sync_engine.as_ref() else {
|
||||
let error = "sync_engine must be initialized".to_string();
|
||||
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
|
||||
error,
|
||||
));
|
||||
};
|
||||
Ok(sync_engine)
|
||||
}
|
||||
|
||||
fn try_unwrap_mut<'a>(
|
||||
sync_engine: &'a mut RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>,
|
||||
) -> turso_sync_engine::Result<&'a mut DatabaseSyncEngine<JsProtocolIo>> {
|
||||
let Some(sync_engine) = sync_engine.as_mut() else {
|
||||
let error = "sync_engine must be initialized".to_string();
|
||||
return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
|
||||
error,
|
||||
));
|
||||
};
|
||||
Ok(sync_engine)
|
||||
}
|
||||
@@ -1394,12 +1394,21 @@ __metadata:
|
||||
languageName: node
|
||||
linkType: hard
|
||||
|
||||
"@tursodatabase/database-browser-common@npm:^0.1.5, @tursodatabase/database-browser-common@workspace:packages/browser-common":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@tursodatabase/database-browser-common@workspace:packages/browser-common"
|
||||
dependencies:
|
||||
typescript: "npm:^5.9.2"
|
||||
languageName: unknown
|
||||
linkType: soft
|
||||
|
||||
"@tursodatabase/database-browser@workspace:packages/browser":
|
||||
version: 0.0.0-use.local
|
||||
resolution: "@tursodatabase/database-browser@workspace:packages/browser"
|
||||
dependencies:
|
||||
"@napi-rs/cli": "npm:^3.1.5"
|
||||
"@napi-rs/wasm-runtime": "npm:^1.0.3"
|
||||
"@tursodatabase/database-browser-common": "npm:^0.1.5"
|
||||
"@tursodatabase/database-common": "npm:^0.1.5"
|
||||
"@vitest/browser": "npm:^3.2.4"
|
||||
playwright: "npm:^1.55.0"
|
||||
|
||||