mirror of https://github.com/aljazceru/turso.git (synced 2025-12-17 16:44:19 +01:00)

opfs for sync in one commit!
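
This commit wires OPFS (the browser's Origin Private File System) into the sync-enabled JavaScript bindings. A rough sketch of the API it enables, based on the browser example and README added below (`connect` persists the file to OPFS, so data survives page reloads):

```javascript
import { connect } from "@tursodatabase/database-browser";

// 'data.db' is backed by OPFS; a web worker holds the sync-access handles
const db = await connect("data.db");
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
await db.exec("INSERT INTO t VALUES (1)");
const rows = await db.prepare("SELECT * FROM t").all();
console.log(rows);
```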

@@ -6,6 +6,7 @@ members = [
"bindings/dart/rust",
"bindings/java",
"bindings/javascript",
"bindings/javascript/sync",
"bindings/python",
"bindings/rust",
"cli",
@@ -27,7 +28,6 @@ members = [
"vendored/sqlite3-parser/sqlparser_bench",
"parser",
"sync/engine",
"sync/javascript",
"sql_generation",
]
exclude = ["perf/latency/limbo"]

272 bindings/javascript/examples/browser/index.html Normal file
@@ -0,0 +1,272 @@
<!doctype html>
<html lang="en">

<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Brutal DB Viewer</title>
<style>
:root {
--fg: #000;
--bg: #fff;
}

* {
box-sizing: border-box;
}

html,
body {
margin: 0 10%;
padding: 0;
background: var(--bg);
color: var(--fg);
font: 14px/1.4 ui-monospace, SFMono-Regular, Menlo, Consolas, "Liberation Mono", monospace;
}

header {
border-bottom: 2px solid #000;
padding: 12px 16px;
font-weight: 700;
letter-spacing: .03em;
text-transform: uppercase;
}

main {
padding: 16px;
display: grid;
gap: 12px;
}

label {
display: block;
margin-bottom: 6px;
}

textarea {
width: 100%;
min-height: 128px;
max-height: 60vh;
resize: vertical;
border: 1px solid #000;
padding: 8px;
background: #fff;
color: #000;
}

.controls {
display: flex;
align-items: center;
gap: 8px;
margin-top: 8px;
}

button {
appearance: none;
background: #fff;
color: #000;
border: 1px solid #000;
padding: 6px 10px;
cursor: pointer;
font: inherit;
}

button:hover {
transform: translate(-1px, -1px);
box-shadow: 2px 2px 0 #000;
}

button:active {
transform: translate(0, 0);
box-shadow: none;
}

.status {
margin-left: auto;
opacity: .9;
}

#result {
border-top: 2px solid #000;
padding-top: 12px;
}

.meta {
margin-bottom: 8px;
}

.error {
border: 1px solid #000;
padding: 8px;
margin-bottom: 8px;
white-space: pre-wrap;
}

.table-wrap {
overflow: auto;
border: 1px solid #000;
max-height: 65vh;
}

table {
width: 100%;
border-collapse: collapse;
}

thead th {
position: sticky;
top: 0;
background: #fff;
}

th,
td {
border: 1px solid #000;
padding: 6px 8px;
vertical-align: top;
white-space: pre;
}

.sr-only {
position: absolute;
width: 1px;
height: 1px;
padding: 0;
margin: -1px;
overflow: hidden;
clip: rect(0, 0, 0, 0);
border: 0;
}
</style>
</head>

<body>
<header>DB Viewer</header>
<main>
<section>
<label for="sql">Query</label>
<textarea id="sql" spellcheck="false" placeholder="SELECT * FROM people;">SELECT 'hello, world';</textarea>
<div class="controls">
<button id="run" type="button" title="Run (Ctrl/⌘ + Enter)">Run</button>
<div class="status" id="status">Ready</div>
</div>
<div class="sr-only" aria-live="polite" id="live"></div>
</section>

<section id="result">
<div class="meta" id="meta">No results yet.</div>
<div id="error" class="error" hidden></div>
<div class="table-wrap">
<table id="table" role="table" aria-label="Query results">
<thead></thead>
<tbody></tbody>
</table>
</div>
</section>
</main>

<script type="module">
import { connect } from "@tursodatabase/database-browser";
const db = await connect('data.db');
// --- Wire your DB here --------------------------------------------------
// Provide window.executeQuery = async (sql) => ({ columns: string[], rows: any[][] })
// If not provided, a tiny mock dataset is used for demo purposes.

(function () {
const $ = (sel) => document.querySelector(sel);
const sqlEl = $('#sql');
const runBtn = $('#run');
const statusEl = $('#status');
const liveEl = $('#live');
const metaEl = $('#meta');
const errEl = $('#error');
const thead = $('#table thead');
const tbody = $('#table tbody');

function fmt(v) {
if (v === null || v === undefined) return 'NULL';
if (typeof v === 'object') {
try { return JSON.stringify(v); } catch { return String(v); }
}
return String(v);
}

function clearTable() { thead.innerHTML = ''; tbody.innerHTML = ''; }

function renderTable(result) {
clearTable();
const { columns = [], rows = [] } = result || {};

// Header
const trh = document.createElement('tr');
for (const name of columns) {
const th = document.createElement('th');
th.textContent = String(name);
trh.appendChild(th);
}
thead.appendChild(trh);

// Body
const frag = document.createDocumentFragment();
for (const r of rows) {
const tr = document.createElement('tr');
for (let i = 0; i < columns.length; i++) {
const td = document.createElement('td');
td.textContent = fmt(r[i] ?? null);
tr.appendChild(td);
}
frag.appendChild(tr);
}
tbody.appendChild(frag);

metaEl.textContent = rows.length
? `${rows.length} row${rows.length === 1 ? '' : 's'} × ${columns.length} column${columns.length === 1 ? '' : 's'}`
: 'No rows.';
}

async function run(sql) {
// errEl.hidden = true; errEl.textContent = '';
// statusEl.textContent = 'Running…';
let t0 = performance.now();
try {
for (let i = 0; i < 1; i++) {
await db.pingSync();
}
const res = {};
// const stmt = await scheduler.postTask(async () => await db.prepare(sql), { priority: 'user-blocking' });
// const columns = await scheduler.postTask(async () => (await stmt.columns()).map(x => x.name), { priority: 'user-blocking' });
// const rows = await scheduler.postTask(async () => await stmt.all(), { priority: 'user-blocking' });
// const res = {
// columns: columns,
// rows: rows.map(r => columns.map(c => r[c]))
// };
const t1 = performance.now();
renderTable(res);
const took = Math.max(0, t1 - t0);
statusEl.textContent = `OK (${took}ms)`;
liveEl.textContent = `Query finished in ${took} milliseconds.`;
} catch (e) {
clearTable();
statusEl.textContent = 'ERROR';
const msg = (e && (e.message || e.toString())) || 'Unknown error';
errEl.textContent = 'ERROR: ' + msg;
errEl.hidden = false;
liveEl.textContent = 'Query failed.';
}
}

runBtn.addEventListener('click', () => run(sqlEl.value));
sqlEl.addEventListener('keydown', (e) => {
if ((e.ctrlKey || e.metaKey) && e.key === 'Enter') {
e.preventDefault();
run(sqlEl.value);
}
});

// Initial demo run
run(sqlEl.value);
})();
</script>
</body>

</html>

19 bindings/javascript/examples/browser/package.json Normal file
@@ -0,0 +1,19 @@
{
"name": "wasm",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"author": "",
"license": "ISC",
"description": "",
"devDependencies": {
"vite": "^7.1.4"
},
"dependencies": {
"@tursodatabase/database-browser": "../../browser"
}
}

22 bindings/javascript/examples/browser/vite.config.js Normal file
@@ -0,0 +1,22 @@
import { defineConfig, searchForWorkspaceRoot } from 'vite'

export default defineConfig({
server: {
fs: {
allow: ['.', '../../']
},
define:
{
'process.env.NODE_DEBUG_NATIVE': 'false', // string replace at build-time
},
headers: {
'Cross-Origin-Opener-Policy': 'same-origin',
'Cross-Origin-Embedder-Policy': 'require-corp',
}
},
optimizeDeps: {
esbuildOptions: {
define: { 'process.env.NODE_DEBUG_NATIVE': 'false' },
},
},
})
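
The `Cross-Origin-Opener-Policy` / `Cross-Origin-Embedder-Policy` headers above opt the dev server into cross-origin isolation, which browsers require before exposing `SharedArrayBuffer`; the shared `WebAssembly.Memory` used by the worker pool in these bindings is backed by one.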

34 bindings/javascript/examples/wasm/index.html Normal file
@@ -0,0 +1,34 @@
<html>
<head>
<meta content="text/html;charset=utf-8" http-equiv="Content-Type"/>
</head>
<body>
<button id="run">Run</button>
<script type="module">
import { Database, opfsSetup } from "@tursodatabase/database";
var opfs = await opfsSetup("local.db");
console.info(opfs);
async function consume() {
console.info('take', opfs.take());
setTimeout(consume, 1000);
}
consume();
async function tick() {
console.info('tick');
setTimeout(tick, 1000);
}
tick();

async function run() {
const db = new Database(opfs);
console.info('inited');
await new Promise(resolve => setTimeout(resolve, 5000));
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
console.info('created');
await db.exec("INSERT INTO t VALUES (1)");
console.info('inserted');
}
document.getElementById("run").onclick = run;
</script>
</body>
</html>

19 bindings/javascript/examples/wasm/package.json Normal file
@@ -0,0 +1,19 @@
{
"name": "wasm",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"author": "",
"license": "ISC",
"description": "",
"devDependencies": {
"vite": "^7.1.4"
},
"dependencies": {
"@tursodatabase/database": "../.."
}
}

26 bindings/javascript/examples/wasm/vite.config.js Normal file
@@ -0,0 +1,26 @@
import { defineConfig, searchForWorkspaceRoot } from 'vite'

export default defineConfig({
build: {
minify: false, // Set this to false to disable minification
},
resolve: {
alias: {
'@tursodatabase/database-wasm32-wasi': '../../turso.wasi-browser.js'
},
},
server: {
fs: {
allow: ['.']
},
headers: {
'Cross-Origin-Opener-Policy': 'same-origin',
'Cross-Origin-Embedder-Policy': 'require-corp',
}
},
optimizeDeps: {
exclude: [
"@tursodatabase/database-wasm32-wasi",
]
},
})

19 bindings/javascript/package-lock.json generated
@@ -9,7 +9,11 @@
"workspaces": [
|
||||
"packages/common",
|
||||
"packages/native",
|
||||
"packages/browser"
|
||||
"packages/browser",
|
||||
"packages/browser-common",
|
||||
"packages/sync/common",
|
||||
"packages/sync/native",
|
||||
"packages/sync/browser"
|
||||
]
|
||||
},
|
||||
"node_modules/@babel/code-frame": {
|
||||
@@ -1103,6 +1107,10 @@
|
||||
"resolved": "packages/browser",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@tursodatabase/database-browser-common": {
|
||||
"resolved": "packages/browser-common",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@tursodatabase/database-common": {
|
||||
"resolved": "packages/common",
|
||||
"link": true
|
||||
@@ -2489,6 +2497,7 @@
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@napi-rs/wasm-runtime": "^1.0.3",
|
||||
"@tursodatabase/database-browser-common": "^0.1.5",
|
||||
"@tursodatabase/database-common": "^0.1.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -2499,6 +2508,14 @@
|
||||
"vitest": "^3.2.4"
|
||||
}
|
||||
},
|
||||
"packages/browser-common": {
|
||||
"name": "@tursodatabase/database-browser-common",
|
||||
"version": "0.1.5",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"typescript": "^5.9.2"
|
||||
}
|
||||
},
|
||||
"packages/common": {
|
||||
"name": "@tursodatabase/database-common",
|
||||
"version": "0.1.5",
|
||||
|
||||
@@ -7,7 +7,11 @@
|
||||
"workspaces": [
|
||||
"packages/common",
|
||||
"packages/native",
|
||||
"packages/browser"
|
||||
"packages/browser",
|
||||
"packages/browser-common",
|
||||
"packages/sync/common",
|
||||
"packages/sync/native",
|
||||
"packages/sync/browser"
|
||||
],
|
||||
"version": "0.1.5"
|
||||
}
|
||||
|
||||

8 bindings/javascript/packages/browser-common/README.md Normal file
@@ -0,0 +1,8 @@
## About

This package is the common JS library for the Turso embedded database, shared between the final builds for Node and the browser.

Do not use this package directly - instead, use `@tursodatabase/database` or `@tursodatabase/database-browser`.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

239 bindings/javascript/packages/browser-common/index.ts Normal file
@@ -0,0 +1,239 @@
function getUint8ArrayFromMemory(memory: WebAssembly.Memory, ptr: number, len: number): Uint8Array {
ptr = ptr >>> 0;
return new Uint8Array(memory.buffer).subarray(ptr, ptr + len);
}

function getStringFromMemory(memory: WebAssembly.Memory, ptr: number, len: number): string {
const shared = getUint8ArrayFromMemory(memory, ptr, len);
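// copy out of WASM memory before decoding: TextDecoder.decode()
// rejects views backed by a SharedArrayBuffer, and this memory is shared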
const copy = new Uint8Array(shared.length);
copy.set(shared);
const decoder = new TextDecoder('utf-8');
return decoder.decode(copy);
}

interface BrowserImports {
is_web_worker(): boolean;
lookup_file(ptr: number, len: number): number;
read(handle: number, ptr: number, len: number, offset: number): number;
write(handle: number, ptr: number, len: number, offset: number): number;
sync(handle: number): number;
truncate(handle: number, len: number): number;
size(handle: number): number;
}

function panic(name): never {
throw new Error(`method ${name} must be invoked only from the main thread`);
}

const MainDummyImports: BrowserImports = {
is_web_worker: function (): boolean {
return false;
},
lookup_file: function (ptr: number, len: number): number {
panic("lookup_file")
},
read: function (handle: number, ptr: number, len: number, offset: number): number {
panic("read")
},
write: function (handle: number, ptr: number, len: number, offset: number): number {
panic("write")
},
sync: function (handle: number): number {
panic("sync")
},
truncate: function (handle: number, len: number): number {
panic("truncate")
},
size: function (handle: number): number {
panic("size")
}
};

function workerImports(opfs: OpfsDirectory, memory: WebAssembly.Memory): BrowserImports {
return {
is_web_worker: function (): boolean {
return true;
},
lookup_file: function (ptr: number, len: number): number {
try {
const handle = opfs.lookupFileHandle(getStringFromMemory(memory, ptr, len));
return handle == null ? -404 : handle;
} catch (e) {
return -1;
}
},
read: function (handle: number, ptr: number, len: number, offset: number): number {
try {
return opfs.read(handle, getUint8ArrayFromMemory(memory, ptr, len), offset);
} catch (e) {
return -1;
}
},
write: function (handle: number, ptr: number, len: number, offset: number): number {
try {
return opfs.write(handle, getUint8ArrayFromMemory(memory, ptr, len), offset)
} catch (e) {
return -1;
}
},
sync: function (handle: number): number {
try {
opfs.sync(handle);
return 0;
} catch (e) {
return -1;
}
},
truncate: function (handle: number, len: number): number {
try {
opfs.truncate(handle, len);
return 0;
} catch (e) {
return -1;
}
},
size: function (handle: number): number {
try {
return opfs.size(handle);
} catch (e) {
return -1;
}
}
}
}
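
// OpfsDirectory (below) owns the OPFS sync-access handles on the worker thread:
// registerFile()/unregisterFile() are async because they open and close
// FileSystemSyncAccessHandles, while read/write/sync/truncate/size stay
// synchronous so the WASM import shims above can call straight into them.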

class OpfsDirectory {
fileByPath: Map<String, { handle: number, sync: FileSystemSyncAccessHandle }>;
fileByHandle: Map<number, FileSystemSyncAccessHandle>;
fileHandleNo: number;

constructor() {
this.fileByPath = new Map();
this.fileByHandle = new Map();
this.fileHandleNo = 0;
}

async registerFile(path: string) {
if (this.fileByPath.has(path)) {
return;
}
const opfsRoot = await navigator.storage.getDirectory();
const opfsHandle = await opfsRoot.getFileHandle(path, { create: true });
const opfsSync = await opfsHandle.createSyncAccessHandle();
this.fileHandleNo += 1;
this.fileByPath.set(path, { handle: this.fileHandleNo, sync: opfsSync });
this.fileByHandle.set(this.fileHandleNo, opfsSync);
}

async unregisterFile(path: string) {
const file = this.fileByPath.get(path);
if (file == null) {
return;
}
this.fileByPath.delete(path);
this.fileByHandle.delete(file.handle);
file.sync.close();
}
lookupFileHandle(path: string): number | null {
try {
const file = this.fileByPath.get(path);
if (file == null) {
return null;
}
return file.handle;
} catch (e) {
console.error('lookupFile', path, e);
throw e;
}
}
read(handle: number, buffer: Uint8Array, offset: number): number {
try {
const file = this.fileByHandle.get(handle);
const result = file.read(buffer, { at: Number(offset) });
return result;
} catch (e) {
console.error('read', handle, buffer.length, offset, e);
throw e;
}
}
write(handle: number, buffer: Uint8Array, offset: number): number {
try {
const file = this.fileByHandle.get(handle);
const result = file.write(buffer, { at: Number(offset) });
return result;
} catch (e) {
console.error('write', handle, buffer.length, offset, e);
throw e;
}
}
sync(handle: number) {
try {
const file = this.fileByHandle.get(handle);
file.flush();
} catch (e) {
console.error('sync', handle, e);
throw e;
}
}
truncate(handle: number, size: number) {
try {
const file = this.fileByHandle.get(handle);
const result = file.truncate(size);
return result;
} catch (e) {
console.error('truncate', handle, size, e);
throw e;
}
}
size(handle: number): number {
try {
const file = this.fileByHandle.get(handle);
const size = file.getSize()
return size;
} catch (e) {
console.error('size', handle, e);
throw e;
}
}
}

var workerRequestId = 0;
function waitForWorkerResponse(worker: Worker, id: number): Promise<any> {
let waitResolve, waitReject;
const callback = msg => {
if (msg.data.id == id) {
if (msg.data.error != null) {
waitReject(msg.data.error)
} else {
waitResolve()
}
cleanup();
}
};
const cleanup = () => worker.removeEventListener("message", callback);

worker.addEventListener("message", callback);
const result = new Promise((resolve, reject) => {
waitResolve = resolve;
waitReject = reject;
});
return result;
}

function registerFileAtWorker(worker: Worker, path: string): Promise<void> {
workerRequestId += 1;
const currentId = workerRequestId;
const promise = waitForWorkerResponse(worker, currentId);
worker.postMessage({ __turso__: "register", path: path, id: currentId });
return promise;
}

function unregisterFileAtWorker(worker: Worker, path: string): Promise<void> {
workerRequestId += 1;
const currentId = workerRequestId;
const promise = waitForWorkerResponse(worker, currentId);
worker.postMessage({ __turso__: "unregister", path: path, id: currentId });
return promise;
}

export { OpfsDirectory, workerImports, MainDummyImports, waitForWorkerResponse, registerFileAtWorker, unregisterFileAtWorker }
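
A minimal sketch of the register/unregister round-trip these helpers implement, mirroring how `promise.ts` below uses them (here `worker` is assumed to be the wasi worker created by the runtime, which answers `__turso__` messages via the handler in `worker.mjs`):

```javascript
import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common";

// main thread: ask the worker to open OPFS sync-access handles
// for the database file and its WAL before connecting
await registerFileAtWorker(worker, "data.db");
await registerFileAtWorker(worker, "data.db-wal");

// ... run queries ...

// on close, release the handles again
await unregisterFileAtWorker(worker, "data.db");
await unregisterFileAtWorker(worker, "data.db-wal");
```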

25 bindings/javascript/packages/browser-common/package.json Normal file
@@ -0,0 +1,25 @@
{
"name": "@tursodatabase/database-browser-common",
"version": "0.1.5",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
},
"type": "module",
"license": "MIT",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"packageManager": "yarn@4.9.2",
"files": [
"dist/**",
"README.md"
],
"devDependencies": {
"typescript": "^5.9.2"
},
"scripts": {
"tsc-build": "npm exec tsc",
"build": "npm run tsc-build",
"test": "echo 'no tests'"
}
}

18 bindings/javascript/packages/browser-common/tsconfig.json Normal file
@@ -0,0 +1,18 @@
{
"compilerOptions": {
"skipLibCheck": true,
"declaration": true,
"declarationMap": true,
"module": "esnext",
"target": "esnext",
"outDir": "dist/",
"lib": [
"es2020",
"DOM",
"WebWorker"
],
},
"include": [
"*"
]
}
@@ -5,6 +5,7 @@ import {
|
||||
WASI as __WASI,
|
||||
} from '@napi-rs/wasm-runtime'
|
||||
|
||||
import { MainDummyImports } from "@tursodatabase/database-browser-common";
|
||||
|
||||
|
||||
const __wasi = new __WASI({
|
||||
@@ -25,10 +26,6 @@ const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())
|
||||
|
||||
export let MainWorker = null;
|
||||
|
||||
function panic(name) {
|
||||
throw new Error(`method ${name} must be invoked only from the main thread`);
|
||||
}
|
||||
|
||||
const {
|
||||
instance: __napiInstance,
|
||||
module: __wasiModule,
|
||||
@@ -49,14 +46,8 @@ const {
|
||||
...importObject.env,
|
||||
...importObject.napi,
|
||||
...importObject.emnapi,
|
||||
...MainDummyImports,
|
||||
memory: __sharedMemory,
|
||||
is_web_worker: () => false,
|
||||
lookup_file: () => panic("lookup_file"),
|
||||
read: () => panic("read"),
|
||||
write: () => panic("write"),
|
||||
sync: () => panic("sync"),
|
||||
truncate: () => panic("truncate"),
|
||||
size: () => panic("size"),
|
||||
}
|
||||
return importObject
|
||||
},
|
||||
|
||||
@@ -40,6 +40,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@napi-rs/wasm-runtime": "^1.0.3",
|
||||
"@tursodatabase/database-browser-common": "^0.1.5",
|
||||
"@tursodatabase/database-common": "^0.1.5"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,50 +1,24 @@
|
||||
import { DatabasePromise, NativeDatabase, DatabaseOpts, SqliteError } from "@tursodatabase/database-common"
|
||||
import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common"
|
||||
import { DatabasePromise, NativeDatabase, DatabaseOpts, SqliteError, } from "@tursodatabase/database-common"
|
||||
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";
|
||||
|
||||
let workerRequestId = 0;
|
||||
class Database extends DatabasePromise {
|
||||
files: string[];
|
||||
constructor(db: NativeDatabase, files: string[], opts: DatabaseOpts = {}) {
|
||||
path: string | null;
|
||||
constructor(db: NativeDatabase, fsPath: string | null, opts: DatabaseOpts = {}) {
|
||||
super(db, opts)
|
||||
this.files = files;
|
||||
this.path = fsPath;
|
||||
}
|
||||
async close() {
|
||||
let currentId = workerRequestId;
|
||||
workerRequestId += this.files.length;
|
||||
|
||||
let tasks = [];
|
||||
for (const file of this.files) {
|
||||
(MainWorker as any).postMessage({ __turso__: "unregister", path: file, id: currentId });
|
||||
tasks.push(waitFor(currentId));
|
||||
currentId += 1;
|
||||
if (this.path != null) {
|
||||
await Promise.all([
|
||||
unregisterFileAtWorker(MainWorker, this.path),
|
||||
unregisterFileAtWorker(MainWorker, `${this.path}-wal`)
|
||||
]);
|
||||
}
|
||||
await Promise.all(tasks);
|
||||
this.db.close();
|
||||
}
|
||||
}
|
||||
|
||||
function waitFor(id: number): Promise<any> {
|
||||
let waitResolve, waitReject;
|
||||
const callback = msg => {
|
||||
if (msg.data.id == id) {
|
||||
if (msg.data.error != null) {
|
||||
waitReject(msg.data.error)
|
||||
} else {
|
||||
waitResolve()
|
||||
}
|
||||
cleanup();
|
||||
}
|
||||
};
|
||||
const cleanup = () => (MainWorker as any).removeEventListener("message", callback);
|
||||
|
||||
(MainWorker as any).addEventListener("message", callback);
|
||||
const result = new Promise((resolve, reject) => {
|
||||
waitResolve = resolve;
|
||||
waitReject = reject;
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new database connection asynchronously.
|
||||
*
|
||||
@@ -55,24 +29,18 @@ function waitFor(id: number): Promise<any> {
|
||||
async function connect(path: string, opts: DatabaseOpts = {}): Promise<Database> {
|
||||
if (path == ":memory:") {
|
||||
const db = await nativeConnect(path, { tracing: opts.tracing });
|
||||
return new Database(db, [], opts);
|
||||
return new Database(db, null, opts);
|
||||
}
|
||||
await initThreadPool();
|
||||
if (MainWorker == null) {
|
||||
throw new Error("panic: MainWorker is not set");
|
||||
}
|
||||
|
||||
let currentId = workerRequestId;
|
||||
workerRequestId += 2;
|
||||
|
||||
let dbHandlePromise = waitFor(currentId);
|
||||
let walHandlePromise = waitFor(currentId + 1);
|
||||
(MainWorker as any).postMessage({ __turso__: "register", path: `${path}`, id: currentId });
|
||||
(MainWorker as any).postMessage({ __turso__: "register", path: `${path}-wal`, id: currentId + 1 });
|
||||
await Promise.all([dbHandlePromise, walHandlePromise]);
|
||||
await Promise.all([
|
||||
registerFileAtWorker(MainWorker, path),
|
||||
registerFileAtWorker(MainWorker, `${path}-wal`)
|
||||
]);
|
||||
const db = await nativeConnect(path, { tracing: opts.tracing });
|
||||
const files = [path, `${path}-wal`];
|
||||
return new Database(db, files, opts);
|
||||
return new Database(db, path, opts);
|
||||
}
|
||||
|
||||
export { connect, Database, SqliteError }
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"declarationMap": true,
|
||||
"module": "nodenext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "nodenext",
|
||||
"outDir": "dist/",
|
||||
"lib": [
|
||||
"es2020"
|
||||
|
||||
@@ -1,108 +1,9 @@
|
||||
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'
|
||||
import { OpfsDirectory, workerImports } from '@tursodatabase/database-browser-common';
|
||||
|
||||
var fileByPath = new Map();
|
||||
var fileByHandle = new Map();
|
||||
let fileHandles = 0;
|
||||
var opfs = new OpfsDirectory();
|
||||
var memory = null;
|
||||
|
||||
function getUint8ArrayFromWasm(ptr, len) {
|
||||
ptr = ptr >>> 0;
|
||||
return new Uint8Array(memory.buffer).subarray(ptr, ptr + len);
|
||||
}
|
||||
|
||||
|
||||
async function registerFile(path) {
|
||||
if (fileByPath.has(path)) {
|
||||
return;
|
||||
}
|
||||
const opfsRoot = await navigator.storage.getDirectory();
|
||||
const opfsHandle = await opfsRoot.getFileHandle(path, { create: true });
|
||||
const opfsSync = await opfsHandle.createSyncAccessHandle();
|
||||
fileHandles += 1;
|
||||
fileByPath.set(path, { handle: fileHandles, sync: opfsSync });
|
||||
fileByHandle.set(fileHandles, opfsSync);
|
||||
}
|
||||
|
||||
async function unregisterFile(path) {
|
||||
const file = fileByPath.get(path);
|
||||
if (file == null) {
|
||||
return;
|
||||
}
|
||||
fileByPath.delete(path);
|
||||
fileByHandle.delete(file.handle);
|
||||
file.sync.close();
|
||||
}
|
||||
|
||||
function lookup_file(pathPtr, pathLen) {
|
||||
try {
|
||||
const buffer = getUint8ArrayFromWasm(pathPtr, pathLen);
|
||||
const notShared = new Uint8Array(buffer.length);
|
||||
notShared.set(buffer);
|
||||
const decoder = new TextDecoder('utf-8');
|
||||
const path = decoder.decode(notShared);
|
||||
const file = fileByPath.get(path);
|
||||
if (file == null) {
|
||||
return -404;
|
||||
}
|
||||
return file.handle;
|
||||
} catch (e) {
|
||||
console.error('lookupFile', pathPtr, pathLen, e);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
function read(handle, bufferPtr, bufferLen, offset) {
|
||||
try {
|
||||
const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
|
||||
const file = fileByHandle.get(Number(handle));
|
||||
const result = file.read(buffer, { at: Number(offset) });
|
||||
return result;
|
||||
} catch (e) {
|
||||
console.error('read', handle, bufferPtr, bufferLen, offset, e);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
function write(handle, bufferPtr, bufferLen, offset) {
|
||||
try {
|
||||
const buffer = getUint8ArrayFromWasm(bufferPtr, bufferLen);
|
||||
const file = fileByHandle.get(Number(handle));
|
||||
const result = file.write(buffer, { at: Number(offset) });
|
||||
return result;
|
||||
} catch (e) {
|
||||
console.error('write', handle, bufferPtr, bufferLen, offset, e);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
function sync(handle) {
|
||||
try {
|
||||
const file = fileByHandle.get(Number(handle));
|
||||
file.flush();
|
||||
return 0;
|
||||
} catch (e) {
|
||||
console.error('sync', handle, e);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
function truncate(handle, size) {
|
||||
try {
|
||||
const file = fileByHandle.get(Number(handle));
|
||||
const result = file.truncate(size);
|
||||
return result;
|
||||
} catch (e) {
|
||||
console.error('truncate', handle, size, e);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
function size(handle) {
|
||||
try {
|
||||
const file = fileByHandle.get(Number(handle));
|
||||
const size = file.getSize()
|
||||
return size;
|
||||
} catch (e) {
|
||||
console.error('size', handle, e);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
const handler = new MessageHandler({
|
||||
onLoad({ wasmModule, wasmMemory }) {
|
||||
memory = wasmMemory;
|
||||
@@ -124,14 +25,8 @@ const handler = new MessageHandler({
|
||||
...importObject.env,
|
||||
...importObject.napi,
|
||||
...importObject.emnapi,
|
||||
...workerImports(opfs, memory),
|
||||
memory: wasmMemory,
|
||||
is_web_worker: () => true,
|
||||
lookup_file: lookup_file,
|
||||
read: read,
|
||||
write: write,
|
||||
sync: sync,
|
||||
truncate: truncate,
|
||||
size: size,
|
||||
}
|
||||
},
|
||||
})
|
||||
@@ -141,16 +36,16 @@ const handler = new MessageHandler({
|
||||
globalThis.onmessage = async function (e) {
|
||||
if (e.data.__turso__ == 'register') {
|
||||
try {
|
||||
await registerFile(e.data.path)
|
||||
self.postMessage({ id: e.data.id })
|
||||
await opfs.registerFile(e.data.path);
|
||||
self.postMessage({ id: e.data.id });
|
||||
} catch (error) {
|
||||
self.postMessage({ id: e.data.id, error: error });
|
||||
}
|
||||
return;
|
||||
} else if (e.data.__turso__ == 'unregister') {
|
||||
try {
|
||||
await unregisterFile(e.data.path)
|
||||
self.postMessage({ id: e.data.id })
|
||||
await opfs.unregisterFile(e.data.path);
|
||||
self.postMessage({ id: e.data.id });
|
||||
} catch (error) {
|
||||
self.postMessage({ id: e.data.id, error: error });
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@ export interface NativeDatabase {
|
||||
|
||||
prepare(sql: string): NativeStatement;
|
||||
|
||||
pluck(pluckMode: boolean);
|
||||
defaultSafeIntegers(toggle: boolean);
|
||||
totalChanges(): number;
|
||||
changes(): number;
|
||||
@@ -32,6 +31,11 @@ export const STEP_ROW = 1;
|
||||
export const STEP_DONE = 2;
|
||||
export const STEP_IO = 3;
|
||||
|
||||
export interface TableColumn {
|
||||
name: string,
|
||||
type: string
|
||||
}
|
||||
|
||||
export interface NativeStatement {
|
||||
stepAsync(): Promise<number>;
|
||||
stepSync(): number;
|
||||
@@ -39,7 +43,7 @@ export interface NativeStatement {
|
||||
pluck(pluckMode: boolean);
|
||||
safeIntegers(toggle: boolean);
|
||||
raw(toggle: boolean);
|
||||
columns(): string[];
|
||||
columns(): TableColumn[];
|
||||
row(): any;
|
||||
reset();
|
||||
finalize();
|
||||
|
||||

16 bindings/javascript/packages/native/index.d.ts vendored
@@ -91,6 +91,14 @@ export declare class Database {
ioLoopAsync(): Promise<void>
}

export declare class Opfs {
constructor()
}

export declare class OpfsFile {

}

/** A prepared statement. */
export declare class Statement {
reset(): void
@@ -144,6 +152,14 @@ export declare class Statement {
finalize(): void
}

export declare function connect(path: string, opts?: DatabaseOpts | undefined | null): Promise<unknown>

export interface DatabaseOpts {
tracing?: string
}

/**
 * turso-db in the browser requires explicit thread pool initialization,
 * so we just put a no-op task on the thread pool and force emnapi to allocate a web worker
 */
export declare function initThreadPool(): Promise<unknown>
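
A rough sketch of how these declarations fit together, following the `connect()` flow in `promise.ts` above (`#index` is the import-map alias the browser package defines for this module):

```javascript
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";
import { registerFileAtWorker } from "@tursodatabase/database-browser-common";

await initThreadPool();   // no-op task; forces emnapi to spawn the web worker
if (MainWorker == null) {
  throw new Error("panic: MainWorker is not set");
}
// open OPFS handles for the database and its WAL on the worker, then connect
await registerFileAtWorker(MainWorker, "data.db");
await registerFileAtWorker(MainWorker, "data.db-wal");
const db = await nativeConnect("data.db");
```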

@@ -508,6 +508,10 @@ if (!nativeBinding) {
throw new Error(`Failed to load native binding`)
}

const { Database, Statement } = nativeBinding
const { Database, Opfs, OpfsFile, Statement, connect, initThreadPool } = nativeBinding
export { Database }
export { Opfs }
export { OpfsFile }
export { Statement }
export { connect }
export { initThreadPool }

11 bindings/javascript/replace.sh Normal file
@@ -0,0 +1,11 @@
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/common/package.json
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/native/package.json
sed -i "s/$NAME_FROM/$NAME_TO/g" packages/browser/package.json

sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/common/package.json
sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/native/package.json
sed -i "s/$VERSION_FROM/$VERSION_TO/g" packages/browser/package.json

sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/native/promise.ts
sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/native/compat.ts
sed -i "s/$NAME_FROM\/database-common/$NAME_TO\/database-common/g" packages/browser/promise.ts

@@ -28,7 +28,6 @@ pub fn init_thread_pool() -> napi::Result<AsyncTask<NoopTask>> {

pub struct ConnectTask {
path: String,
is_memory: bool,
io: Arc<dyn turso_core::IO>,
}

@@ -70,7 +69,7 @@ impl Task for ConnectTask {
Some(result.db),
self.io.clone(),
result.conn,
self.is_memory,
self.path.clone(),
))
}
}
@@ -88,16 +87,11 @@ pub fn connect(path: String, opts: Option<DatabaseOpts>) -> Result<AsyncTask<Con
let task = if is_memory(&path) {
ConnectTask {
io: Arc::new(turso_core::MemoryIO::new()),
is_memory: true,
path,
}
} else {
let io = Arc::new(Opfs::new()?);
ConnectTask {
io,
is_memory: false,
path,
}
ConnectTask { io, path }
};
Ok(AsyncTask::new(task))
}

@@ -45,7 +45,7 @@ pub struct Database {
_db: Option<Arc<turso_core::Database>>,
io: Arc<dyn turso_core::IO>,
conn: Option<Arc<turso_core::Connection>>,
is_memory: bool,
path: String,
is_open: Cell<bool>,
default_safe_integers: Cell<bool>,
}
@@ -186,20 +186,20 @@ impl Database {
.connect()
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to connect: {e}")))?;

Ok(Self::create(Some(db), io, conn, is_memory(&path)))
Ok(Self::create(Some(db), io, conn, path))
}

pub fn create(
db: Option<Arc<turso_core::Database>>,
io: Arc<dyn turso_core::IO>,
conn: Arc<turso_core::Connection>,
is_memory: bool,
path: String,
) -> Self {
Database {
_db: db,
io,
conn: Some(conn),
is_memory,
path,
is_open: Cell::new(true),
default_safe_integers: Cell::new(false),
}
@@ -218,7 +218,13 @@ impl Database {
/// Returns whether the database is in memory-only mode.
#[napi(getter)]
pub fn memory(&self) -> bool {
self.is_memory
is_memory(&self.path)
}

/// Returns the path of the database file.
#[napi(getter)]
pub fn path(&self) -> String {
self.path.clone()
}

/// Returns whether the database connection is open.
@@ -246,7 +252,7 @@ impl Database {
/// * `sql` - The SQL statements to execute.
///
/// # Returns
#[napi]
#[napi(ts_return_type = "Promise<void>")]
pub fn batch_async(&self, sql: String) -> Result<AsyncTask<DbTask>> {
Ok(AsyncTask::new(DbTask::Batch {
conn: self.conn()?.clone(),
@@ -319,7 +325,7 @@ impl Database {
#[napi]
pub fn close(&mut self) -> Result<()> {
self.is_open.set(false);
let _ = self._db.take().unwrap();
let _ = self._db.take();
let _ = self.conn.take().unwrap();
Ok(())
}
@@ -482,7 +488,7 @@ impl Statement {

/// Step the statement and return result code (executed on the background thread):
/// 1 = Row available, 2 = Done, 3 = I/O needed
#[napi]
#[napi(ts_return_type = "Promise<number>")]
pub fn step_async(&self) -> Result<AsyncTask<DbTask>> {
Ok(AsyncTask::new(DbTask::Step {
stmt: self.stmt.clone(),
@@ -577,7 +583,7 @@ impl Statement {
}

/// Get column information for the statement
#[napi]
#[napi(ts_return_type = "Promise<any>")]
pub fn columns<'env>(&self, env: &'env Env) -> Result<Array<'env>> {
let stmt_ref = self.stmt.borrow();
let stmt = stmt_ref

@@ -21,3 +21,6 @@ tracing-subscriber = "0.3.19"

[build-dependencies]
napi-build = "2.2.3"

[features]
browser = ["turso_node/browser"]

124 bindings/javascript/sync/packages/browser/README.md Normal file
@@ -0,0 +1,124 @@
<p align="center">
<h1 align="center">Turso Database for JavaScript in Browser</h1>
</p>

<p align="center">
<a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
<a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
<a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package is the Turso embedded database library for JavaScript in the browser.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Features

- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
- **In-process**: No network overhead, runs directly in the browser
- **TypeScript support**: Full TypeScript definitions included

## Installation

```bash
npm install @tursodatabase/database-browser
```

## Getting Started

### In-Memory Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create an in-memory database
const db = await connect(':memory:');

// Create a table
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

// Insert data
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
await insert.run('Alice', 'alice@example.com');
await insert.run('Bob', 'bob@example.com');

// Query data
const users = await db.prepare('SELECT * FROM users').all();
console.log(users);
// Output: [
//   { id: 1, name: 'Alice', email: 'alice@example.com' },
//   { id: 2, name: 'Bob', email: 'bob@example.com' }
// ]
```

### File-Based Database

```javascript
import { connect } from '@tursodatabase/database-browser';

// Create or open a database file
const db = await connect('my-database.db');

// Create a table
await db.exec(`
CREATE TABLE IF NOT EXISTS posts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT NOT NULL,
content TEXT,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
)
`);

// Insert a post
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
const result = await insertPost.run('Hello World', 'This is my first blog post!');

console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
```

### Transactions

```javascript
import { connect } from '@tursodatabase/database-browser';

const db = await connect('transactions.db');

// Using transactions for atomic operations
const transaction = db.transaction(async (users) => {
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
for (const user of users) {
await insert.run(user.name, user.email);
}
});

// Execute transaction
await transaction([
{ name: 'Alice', email: 'alice@example.com' },
{ name: 'Bob', email: 'bob@example.com' }
]);
```

## API Reference

For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).

## Related Packages

* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud.

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)

@@ -1,17 +1,18 @@
import {
createOnMessage as __wasmCreateOnMessageForFsProxy,
getDefaultContext as __emnapiGetDefaultContext,
instantiateNapiModuleSync as __emnapiInstantiateNapiModuleSync,
instantiateNapiModule as __emnapiInstantiateNapiModule,
WASI as __WASI,
} from '@napi-rs/wasm-runtime'

import { MainDummyImports } from "@tursodatabase/database-browser-common";

const __wasi = new __WASI({
version: 'preview1',
})

const __wasmUrl = new URL('./turso-sync-js.wasm32-wasi.wasm', import.meta.url).href
const __wasmUrl = new URL('./sync.wasm32-wasi.wasm', import.meta.url).href
const __emnapiContext = __emnapiGetDefaultContext()

@@ -23,19 +24,21 @@ const __sharedMemory = new WebAssembly.Memory({

const __wasmFile = await fetch(__wasmUrl).then((res) => res.arrayBuffer())

export let MainWorker = null;

const {
instance: __napiInstance,
module: __wasiModule,
napiModule: __napiModule,
} = __emnapiInstantiateNapiModuleSync(__wasmFile, {
} = await __emnapiInstantiateNapiModule(__wasmFile, {
context: __emnapiContext,
asyncWorkPoolSize: 4,
asyncWorkPoolSize: 1,
wasi: __wasi,
onCreateWorker() {
const worker = new Worker(new URL('./wasi-worker-browser.mjs', import.meta.url), {
const worker = new Worker(new URL('./worker.mjs', import.meta.url), {
type: 'module',
})

MainWorker = worker;
return worker
},
overwriteImports(importObject) {
@@ -43,6 +46,7 @@ const {
...importObject.env,
...importObject.napi,
...importObject.emnapi,
...MainDummyImports,
memory: __sharedMemory,
}
return importObject
@@ -58,11 +62,15 @@ const {
export default __napiModule.exports
export const Database = __napiModule.exports.Database
export const Statement = __napiModule.exports.Statement
export const Opfs = __napiModule.exports.Opfs
export const OpfsFile = __napiModule.exports.OpfsFile
export const connect = __napiModule.exports.connect
export const initThreadPool = __napiModule.exports.initThreadPool
export const GeneratorHolder = __napiModule.exports.GeneratorHolder
export const JsDataCompletion = __napiModule.exports.JsDataCompletion
export const JsDataPollResult = __napiModule.exports.JsDataPollResult
export const JsProtocolIo = __napiModule.exports.JsProtocolIo
export const JsProtocolRequestData = __napiModule.exports.JsProtocolRequestData
export const JsProtocolRequestBytes = __napiModule.exports.JsProtocolRequestBytes
export const SyncEngine = __napiModule.exports.SyncEngine
export const DatabaseChangeTypeJs = __napiModule.exports.DatabaseChangeTypeJs
export const SyncEngineProtocolVersion = __napiModule.exports.SyncEngineProtocolVersion

46 bindings/javascript/sync/packages/browser/package.json Normal file
@@ -0,0 +1,46 @@
{
"name": "@tursodatabase/sync-browser",
"version": "0.1.5",
"repository": {
"type": "git",
"url": "https://github.com/tursodatabase/turso"
},
"type": "module",
"license": "MIT",
"main": "dist/promise.js",
"packageManager": "yarn@4.9.2",
"files": [
"index.js",
"worker.mjs",
"sync.wasm32-wasi.wasm",
"dist/**",
"README.md"
],
"devDependencies": {
"@napi-rs/cli": "^3.1.5",
"@vitest/browser": "^3.2.4",
"playwright": "^1.55.0",
"typescript": "^5.9.2",
"vitest": "^3.2.4"
},
"scripts": {
"napi-build": "napi build --features browser --release --platform --target wasm32-wasip1-threads --no-js --manifest-path ../../Cargo.toml --output-dir . && rm index.d.ts sync.wasi* wasi* browser.js",
"tsc-build": "npm exec tsc",
"build": "npm run napi-build && npm run tsc-build",
"test": "VITE_TURSO_DB_URL=http://b--a--a.localhost:10000 CI=1 vitest --browser=chromium --run && VITE_TURSO_DB_URL=http://b--a--a.localhost:10000 CI=1 vitest --browser=firefox --run"
},
"napi": {
"binaryName": "sync",
"targets": [
"wasm32-wasip1-threads"
]
},
"imports": {
"#index": "./index.js"
},
"dependencies": {
"@napi-rs/wasm-runtime": "^1.0.3",
"@tursodatabase/sync-common": "^0.1.5",
"@tursodatabase/database-common": "^0.1.5"
}
}

281 bindings/javascript/sync/packages/browser/promise.test.ts Normal file
@@ -0,0 +1,281 @@
import { expect, test } from 'vitest'
import { connect, DatabaseRowMutation, DatabaseRowTransformResult } from './promise.js'

const localeCompare = (a, b) => a.x.localeCompare(b.x);

test('select-after-push', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
await db.exec("DELETE FROM t");
await db.push();
await db.close();
}
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("INSERT INTO t VALUES (1), (2), (3)");
await db.push();
}
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
const rows = await db.prepare('SELECT * FROM t').all();
expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }])
}
})

test('select-without-push', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
await db.exec("DELETE FROM t");
await db.push();
await db.close();
}
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("INSERT INTO t VALUES (1), (2), (3)");
}
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
const rows = await db.prepare('SELECT * FROM t').all();
expect(rows).toEqual([])
}
})

test('merge-non-overlapping-keys', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
await db.exec("DELETE FROM q");
await db.push();
await db.close();
}
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2')");

const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db2.exec("INSERT INTO q VALUES ('k3', 'value3'), ('k4', 'value4'), ('k5', 'value5')");

await Promise.all([db1.push(), db2.push()]);
await Promise.all([db1.pull(), db2.pull()]);

const rows1 = await db1.prepare('SELECT * FROM q').all();
const rows2 = await db2.prepare('SELECT * FROM q').all();
const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value3' }, { x: 'k4', y: 'value4' }, { x: 'k5', y: 'value5' }];
expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
await db.exec("DELETE FROM q");
await db.push();
await db.close();
}
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");

const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

await db2.push();
await db1.push();
await Promise.all([db1.pull(), db2.pull()]);

const rows1 = await db1.prepare('SELECT * FROM q').all();
const rows2 = await db2.prepare('SELECT * FROM q').all();
const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value5' }, { x: 'k4', y: 'value4' }];
expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins-with-delete', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
await db.exec("DELETE FROM q");
await db.push();
await db.close();
}
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
await db1.exec("DELETE FROM q")

const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

await db2.push();
await db1.push();
await Promise.all([db1.pull(), db2.pull()]);

const rows1 = await db1.prepare('SELECT * FROM q').all();
const rows2 = await db2.prepare('SELECT * FROM q').all();
const expected = [{ x: 'k3', y: 'value5' }];
expect(rows1).toEqual(expected)
expect(rows2).toEqual(expected)
})

test('constraint-conflict', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS u(x TEXT PRIMARY KEY, y UNIQUE)");
await db.exec("DELETE FROM u");
await db.push();
await db.close();
}
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db1.exec("INSERT INTO u VALUES ('k1', 'value1')");

const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db2.exec("INSERT INTO u VALUES ('k2', 'value1')");

await db1.push();
await expect(async () => await db2.push()).rejects.toThrow('SQLite error: UNIQUE constraint failed: u.y');
})

test('checkpoint', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
await db.exec("DELETE FROM q");
await db.push();
await db.close();
}
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
for (let i = 0; i < 1000; i++) {
await db1.exec(`INSERT INTO q VALUES ('k${i}', 'v${i}')`);
}
expect((await db1.stats()).mainWal).toBeGreaterThan(4096 * 1000);
await db1.checkpoint();
expect((await db1.stats()).mainWal).toBe(0);
let revertWal = (await db1.stats()).revertWal;
expect(revertWal).toBeLessThan(4096 * 1000 / 100);

for (let i = 0; i < 1000; i++) {
await db1.exec(`UPDATE q SET y = 'u${i}' WHERE x = 'k${i}'`);
}
await db1.checkpoint();
expect((await db1.stats()).revertWal).toBe(revertWal);
})

test('persistence', async () => {
{
const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
await db.exec("DELETE FROM q");
await db.push();
await db.close();
}
const path = `test-${(Math.random() * 10000) | 0}.db`;
{
const db1 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
await db1.exec(`INSERT INTO q VALUES ('k1', 'v1')`);
await db1.exec(`INSERT INTO q VALUES ('k2', 'v2')`);
await db1.close();
}

{
const db2 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
await db2.exec(`INSERT INTO q VALUES ('k3', 'v3')`);
await db2.exec(`INSERT INTO q VALUES ('k4', 'v4')`);
const stmt = db2.prepare('SELECT * FROM q');
const rows = await stmt.all();
const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
expect(rows).toEqual(expected)
stmt.close();
await db2.close();
}

{
const db3 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
await db3.push();
await db3.close();
}

{
const db4 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
const rows = await db4.prepare('SELECT * FROM q').all();
const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
expect(rows).toEqual(expected)
await db4.close();
}
})

test('transform', async () => {
{
const db = await connect({
path: ':memory:',
url: process.env.VITE_TURSO_DB_URL,
});
await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
await db.exec("DELETE FROM counter");
await db.exec("INSERT INTO counter VALUES ('1', 0)")
await db.push();
await db.close();
}
const transform = (m: DatabaseRowMutation) => ({
operation: 'rewrite',
stmt: {
sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
values: [m.after.value - m.before.value, m.after.key]
}
} as DatabaseRowTransformResult);
const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");

await Promise.all([db1.push(), db2.push()]);
await Promise.all([db1.pull(), db2.pull()]);
|
||||
|
||||
const rows1 = await db1.prepare('SELECT * FROM counter').all();
|
||||
const rows2 = await db2.prepare('SELECT * FROM counter').all();
|
||||
expect(rows1).toEqual([{ key: '1', value: 2 }]);
|
||||
expect(rows2).toEqual([{ key: '1', value: 2 }]);
|
||||
})
|
||||
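The transform above is what makes the two concurrent `UPDATE counter SET value = value + 1` writes converge to `2` instead of one replica's push overwriting the other: each recorded mutation is replayed as a delta rather than an absolute value. A minimal sketch of the arithmetic, using a hypothetical `applyDelta` helper that is not part of the package:

```ts
// Each replica records before/after snapshots; the rewrite turns them into deltas.
const applyDelta = (value: number, m: { before: number; after: number }) =>
  value + (m.after - m.before);

let v = 0;                                  // state pushed by the seeding connection
v = applyDelta(v, { before: 0, after: 1 }); // db1's UPDATE, replayed as +1
v = applyDelta(v, { before: 0, after: 1 }); // db2's UPDATE, replayed as +1
console.log(v); // 2 - matches the expected rows in the test above
```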

test('transform-many', async () => {
  {
    const db = await connect({
      path: ':memory:',
      url: process.env.VITE_TURSO_DB_URL,
    });
    await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
    await db.exec("DELETE FROM counter");
    await db.exec("INSERT INTO counter VALUES ('1', 0)")
    await db.push();
    await db.close();
  }
  const transform = (m: DatabaseRowMutation) => ({
    operation: 'rewrite',
    stmt: {
      sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
      values: [m.after.value - m.before.value, m.after.key]
    }
  } as DatabaseRowTransformResult);
  const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
  const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

  for (let i = 0; i < 1002; i++) {
    await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
  }
  for (let i = 0; i < 1001; i++) {
    await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
  }

  let start = performance.now();
  await Promise.all([db1.push(), db2.push()]);
  console.info('push', performance.now() - start);

  start = performance.now();
  await Promise.all([db1.pull(), db2.pull()]);
  console.info('pull', performance.now() - start);

  const rows1 = await db1.prepare('SELECT * FROM counter').all();
  const rows2 = await db2.prepare('SELECT * FROM counter').all();
  expect(rows1).toEqual([{ key: '1', value: 1001 + 1002 }]);
  expect(rows2).toEqual([{ key: '1', value: 1001 + 1002 }]);
})
113
bindings/javascript/sync/packages/browser/promise.ts
Normal file
@@ -0,0 +1,113 @@
import { registerFileAtWorker, unregisterFileAtWorker } from "@tursodatabase/database-browser-common"
import { DatabasePromise, DatabaseOpts, NativeDatabase } from "@tursodatabase/database-common"
import { ProtocolIo, run, SyncOpts, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult, memoryIO } from "@tursodatabase/sync-common";
import { connect as nativeConnect, initThreadPool, MainWorker } from "#index";
import { Database as NativeDB, SyncEngine } from "#index";

let BrowserIo: ProtocolIo = {
  async read(path: string): Promise<Buffer | Uint8Array | null> {
    const result = localStorage.getItem(path);
    if (result == null) {
      return null;
    }
    return new TextEncoder().encode(result);
  },
  async write(path: string, data: Buffer | Uint8Array): Promise<void> {
    const array = new Uint8Array(data);
    const value = new TextDecoder('utf-8').decode(array);
    localStorage.setItem(path, value);
  }
};


class Database extends DatabasePromise {
  runOpts: RunOpts;
  engine: any;
  io: ProtocolIo;
  fsPath: string | null;
  constructor(db: NativeDatabase, io: ProtocolIo, runOpts: RunOpts, engine: any, fsPath: string | null, opts: DatabaseOpts = {}) {
    super(db, opts)
    this.runOpts = runOpts;
    this.engine = engine;
    this.fsPath = fsPath;
    this.io = io;
  }
  async sync() {
    await run(this.runOpts, this.io, this.engine, this.engine.sync());
  }
  async pull() {
    await run(this.runOpts, this.io, this.engine, this.engine.pull());
  }
  async push() {
    await run(this.runOpts, this.io, this.engine, this.engine.push());
  }
  async checkpoint() {
    await run(this.runOpts, this.io, this.engine, this.engine.checkpoint());
  }
  async stats(): Promise<{ operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }> {
    return (await run(this.runOpts, this.io, this.engine, this.engine.stats()));
  }
  override async close(): Promise<void> {
    this.db.close();
    this.engine.close();
    if (this.fsPath != null) {
      await Promise.all([
        unregisterFileAtWorker(MainWorker, this.fsPath),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-wal`),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-revert`),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-info`),
        unregisterFileAtWorker(MainWorker, `${this.fsPath}-changes`),
      ]);
    }
  }
}

/**
 * Creates a new database connection asynchronously.
 *
 * @param {SyncOpts} opts - Options for the database path, sync endpoint, and sync behavior.
 * @returns {Promise<Database>} - A promise that resolves to a Database instance.
 */
async function connect(opts: SyncOpts): Promise<Database> {
  const engine = new SyncEngine({
    path: opts.path,
    clientName: opts.clientName,
    tablesIgnore: opts.tablesIgnore,
    useTransform: opts.transform != null,
    tracing: opts.tracing,
    protocolVersion: 1
  });
  const runOpts: RunOpts = {
    url: opts.url,
    headers: {
      ...(opts.authToken != null && { "Authorization": `Bearer ${opts.authToken}` }),
      ...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
    },
    preemptionMs: 1,
    transform: opts.transform,
  };
  const isMemory = opts.path == ':memory:';
  let io = isMemory ? memoryIO() : BrowserIo;

  await initThreadPool();
  if (MainWorker == null) {
    throw new Error("panic: MainWorker is not set");
  }
  if (!isMemory) {
    await Promise.all([
      registerFileAtWorker(MainWorker, opts.path),
      registerFileAtWorker(MainWorker, `${opts.path}-wal`),
      registerFileAtWorker(MainWorker, `${opts.path}-revert`),
      registerFileAtWorker(MainWorker, `${opts.path}-info`),
      registerFileAtWorker(MainWorker, `${opts.path}-changes`),
    ]);
  }
  await run(runOpts, io, engine, engine.init());

  const nativeDb = engine.open();
  return new Database(nativeDb as any, io, runOpts, engine, isMemory ? null : opts.path, {});
}

export { connect, Database, }
export type { DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
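Taken together, `connect` plus the `Database` wrapper above give the browser build the same surface as the Node package. A minimal usage sketch; the URL and token are placeholders, and `@tursodatabase/sync-browser` is the published name referenced by this commit's packaging:

```ts
import { connect } from "@tursodatabase/sync-browser";

// ':memory:' skips OPFS registration; any other path is backed by OPFS files.
const db = await connect({
  path: "app.db",
  url: "https://<your-database>.turso.io", // placeholder sync endpoint
  authToken: "<token>",                    // placeholder
});
await db.exec("CREATE TABLE IF NOT EXISTS notes(id INTEGER PRIMARY KEY, body TEXT)");
await db.push();  // send local WAL changes to the remote
await db.pull();  // fetch remote changes
await db.close(); // also unregisters the OPFS files from the worker
```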
@@ -1,17 +1,19 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "nodenext",
    "target": "esnext",
    "moduleResolution": "nodenext",
    "declaration": true,
    "outDir": "dist/",
    "lib": [
      "es2020",
      "dom",
      "DOM",
      "WebWorker"
    ],
    "paths": {
      "#entry-point": [
      "#index": [
        "./index.js"
      ]
    }
23
bindings/javascript/sync/packages/browser/vitest.config.ts
Normal file
@@ -0,0 +1,23 @@
import { defineConfig } from 'vitest/config'

export default defineConfig({
  define: {
    'process.env.NODE_DEBUG_NATIVE': 'false',
  },
  server: {
    headers: {
      "Cross-Origin-Embedder-Policy": "require-corp",
      "Cross-Origin-Opener-Policy": "same-origin"
    },
  },
  test: {
    browser: {
      enabled: true,
      provider: 'playwright',
      instances: [
        { browser: 'chromium' },
        { browser: 'firefox' }
      ],
    },
  },
})
@@ -1,13 +1,18 @@
import { instantiateNapiModuleSync, MessageHandler, WASI } from '@napi-rs/wasm-runtime'
import { OpfsDirectory, workerImports } from "@tursodatabase/database-browser-common";

var opfs = new OpfsDirectory();
var memory = null;

const handler = new MessageHandler({
  onLoad({ wasmModule, wasmMemory }) {
    memory = wasmMemory;
    const wasi = new WASI({
      print: function () {
        // eslint-disable-next-line no-console
        console.log.apply(console, arguments)
      },
      printErr: function() {
      printErr: function () {
        // eslint-disable-next-line no-console
        console.error.apply(console, arguments)
      },
@@ -20,6 +25,7 @@ const handler = new MessageHandler({
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
      ...workerImports(opfs, memory),
      memory: wasmMemory,
    }
  },
@@ -27,6 +33,23 @@ const handler = new MessageHandler({
  },
})

globalThis.onmessage = function (e) {
globalThis.onmessage = async function (e) {
  if (e.data.__turso__ == 'register') {
    try {
      await opfs.registerFile(e.data.path);
      self.postMessage({ id: e.data.id });
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
    return;
  } else if (e.data.__turso__ == 'unregister') {
    try {
      await opfs.unregisterFile(e.data.path);
      self.postMessage({ id: e.data.id });
    } catch (error) {
      self.postMessage({ id: e.data.id, error: error });
    }
    return;
  }
  handler.handle(e)
}
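The `register`/`unregister` messages handled above have a main-thread counterpart (`registerFileAtWorker` in `@tursodatabase/database-browser-common`, used by `promise.ts` earlier in this commit). A sketch of what that side plausibly looks like, assuming only the message shape visible here; the real helper lives in that shared package:

```ts
// Hypothetical reimplementation for illustration only.
let nextId = 0;
function registerFileAtWorker(worker: Worker, path: string): Promise<void> {
  const id = nextId++;
  return new Promise((resolve, reject) => {
    const onMessage = (e: MessageEvent) => {
      if (e.data?.id !== id) return; // reply belongs to some other request
      worker.removeEventListener("message", onMessage);
      e.data.error ? reject(e.data.error) : resolve();
    };
    worker.addEventListener("message", onMessage);
    worker.postMessage({ __turso__: "register", id, path });
  });
}
```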
8
bindings/javascript/sync/packages/common/README.md
Normal file
@@ -0,0 +1,8 @@
## About

This package is the Turso Sync common JS library, shared between the final builds for Node and the browser.

Do not use this package directly - instead use `@tursodatabase/sync` or `@tursodatabase/sync-browser`.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.
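For illustration, a minimal sketch of consuming the public entry points instead, assuming an environment variable holding the sync URL; the API shape matches the tests elsewhere in this commit:

```ts
// Node build; for the browser, import from "@tursodatabase/sync-browser" instead.
import { connect } from "@tursodatabase/sync";

const db = await connect({ path: ":memory:", url: process.env.TURSO_DB_URL });
await db.pull();
```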
5
bindings/javascript/sync/packages/common/index.ts
Normal file
@@ -0,0 +1,5 @@
import { run, memoryIO } from "./run.js"
import { SyncOpts, ProtocolIo, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult } from "./types.js"

export { run, memoryIO, }
export type { SyncOpts, ProtocolIo, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
25
bindings/javascript/sync/packages/common/package.json
Normal file
@@ -0,0 +1,25 @@
{
  "name": "@tursodatabase/sync-common",
  "version": "0.1.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "type": "module",
  "license": "MIT",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "packageManager": "yarn@4.9.2",
  "files": [
    "dist/**",
    "README.md"
  ],
  "devDependencies": {
    "typescript": "^5.9.2"
  },
  "scripts": {
    "tsc-build": "npm exec tsc",
    "build": "npm run tsc-build",
    "test": "echo 'no tests'"
  }
}
127
bindings/javascript/sync/packages/common/run.ts
Normal file
@@ -0,0 +1,127 @@
"use strict";

import { GeneratorResponse, ProtocolIo, RunOpts } from "./types.js";

const GENERATOR_RESUME_IO = 0;
const GENERATOR_RESUME_DONE = 1;

interface TrackPromise<T> {
  promise: Promise<T>,
  finished: boolean
}

function trackPromise<T>(p: Promise<T>): TrackPromise<T> {
  const status: TrackPromise<T> = { promise: p, finished: false };
  status.promise = p.finally(() => { status.finished = true; });
  return status;
}

function timeoutMs(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms))
}

async function process(opts: RunOpts, io: ProtocolIo, request: any) {
  const requestType = request.request();
  const completion = request.completion();
  if (requestType.type == 'Http') {
    try {
      let headers = opts.headers;
      if (requestType.headers != null && requestType.headers.length > 0) {
        headers = { ...opts.headers };
        for (let header of requestType.headers) {
          headers[header[0]] = header[1];
        }
      }
      const response = await fetch(`${opts.url}${requestType.path}`, {
        method: requestType.method,
        headers: headers,
        body: requestType.body != null ? new Uint8Array(requestType.body) : null,
      });
      completion.status(response.status);
      const reader = response.body.getReader();
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          completion.done();
          break;
        }
        completion.pushBuffer(value);
      }
    } catch (error) {
      completion.poison(`fetch error: ${error}`);
    }
  } else if (requestType.type == 'FullRead') {
    try {
      const metadata = await io.read(requestType.path);
      if (metadata != null) {
        completion.pushBuffer(metadata);
      }
      completion.done();
    } catch (error) {
      completion.poison(`metadata read error: ${error}`);
    }
  } else if (requestType.type == 'FullWrite') {
    try {
      await io.write(requestType.path, requestType.content);
      completion.done();
    } catch (error) {
      completion.poison(`metadata write error: ${error}`);
    }
  } else if (requestType.type == 'Transform') {
    if (opts.transform == null) {
      completion.poison("transform is not set");
      return;
    }
    const results = [];
    for (const mutation of requestType.mutations) {
      const result = opts.transform(mutation);
      if (result == null) {
        results.push({ type: 'Keep' });
      } else if (result.operation == 'skip') {
        results.push({ type: 'Skip' });
      } else if (result.operation == 'rewrite') {
        results.push({ type: 'Rewrite', stmt: result.stmt });
      } else {
        completion.poison("unexpected transform operation");
        return;
      }
    }
    completion.pushTransform(results);
    completion.done();
  }
}

export function memoryIO(): ProtocolIo {
  let values = new Map();
  return {
    async read(path: string): Promise<Buffer | Uint8Array | null> {
      return values.get(path);
    },
    async write(path: string, data: Buffer | Uint8Array): Promise<void> {
      values.set(path, data);
    }
  }
};


export async function run(opts: RunOpts, io: ProtocolIo, engine: any, generator: any): Promise<any> {
  let tasks = [];
  while (true) {
    const { type, ...rest }: GeneratorResponse = await generator.resumeAsync(null);
    if (type == 'Done') {
      return null;
    }
    if (type == 'SyncEngineStats') {
      return rest;
    }
    for (let request = engine.protocolIo(); request != null; request = engine.protocolIo()) {
      tasks.push(trackPromise(process(opts, io, request)));
    }

    const tasksRace = tasks.length == 0 ? Promise.resolve() : Promise.race([timeoutMs(opts.preemptionMs), ...tasks.map(t => t.promise)]);
    await Promise.all([engine.ioLoopAsync(), tasksRace]);

    tasks = tasks.filter(t => !t.finished);
  }
  return generator.take();
}
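`memoryIO` gives the engine a throwaway metadata store, which is what the `:memory:` code paths use. A small self-contained sketch of its contract:

```ts
import { memoryIO } from "@tursodatabase/sync-common";

const io = memoryIO();
await io.write("db-info", new TextEncoder().encode('{"revision":1}'));
const bytes = await io.read("db-info");  // the Uint8Array written above
const missing = await io.read("absent"); // nothing stored yet, so undefined/null
```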
17
bindings/javascript/sync/packages/common/tsconfig.json
Normal file
@@ -0,0 +1,17 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "esnext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020",
      "dom"
    ],
  },
  "include": [
    "*"
  ]
}
50
bindings/javascript/sync/packages/common/types.ts
Normal file
@@ -0,0 +1,50 @@
export declare const enum DatabaseChangeType {
  Insert = 0,
  Update = 1,
  Delete = 2
}

export interface DatabaseRowMutation {
  changeTime: number
  tableName: string
  id: number
  changeType: DatabaseChangeType
  before?: Record<string, any>
  after?: Record<string, any>
  updates?: Record<string, any>
}

export type DatabaseRowTransformResult = { operation: 'skip' } | { operation: 'rewrite', stmt: DatabaseRowStatement } | null;
export type Transform = (arg: DatabaseRowMutation) => DatabaseRowTransformResult;
export interface RunOpts {
  preemptionMs: number,
  url: string,
  headers: { [K: string]: string }
  transform?: Transform,
}

export interface ProtocolIo {
  read(path: string): Promise<Buffer | Uint8Array | null>;
  write(path: string, content: Buffer | Uint8Array): Promise<void>;
}

export interface SyncOpts {
  path: string;
  clientName?: string;
  url: string;
  authToken?: string;
  encryptionKey?: string;
  tablesIgnore?: string[],
  transform?: Transform,
  tracing?: string,
}

export interface DatabaseRowStatement {
  sql: string
  values: Array<any>
}

export type GeneratorResponse =
  | { type: 'IO' }
  | { type: 'Done' }
  | { type: 'SyncEngineStats', operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }
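`Transform` is the user-facing hook matching these types: returning `null` keeps a mutation as-is, `skip` drops it, and `rewrite` replaces it with a statement (see the handling in `run.ts` above). A sketch that skips deletes on one table and keeps everything else; the table name is illustrative:

```ts
import type { DatabaseRowMutation, DatabaseRowTransformResult } from "@tursodatabase/sync-common";

const transform = (m: DatabaseRowMutation): DatabaseRowTransformResult => {
  // DatabaseChangeType.Delete = 2 in the enum above.
  if (m.tableName === "audit_log" && m.changeType === 2) {
    return { operation: "skip" }; // never push local deletes of audit rows
  }
  return null; // keep the mutation unchanged
};
```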
125
bindings/javascript/sync/packages/native/README.md
Normal file
@@ -0,0 +1,125 @@
<p align="center">
  <h1 align="center">Turso Database for JavaScript in Node</h1>
</p>

<p align="center">
  <a title="JavaScript" target="_blank" href="https://www.npmjs.com/package/@tursodatabase/database"><img alt="npm" src="https://img.shields.io/npm/v/@tursodatabase/database"></a>
  <a title="MIT" target="_blank" href="https://github.com/tursodatabase/turso/blob/main/LICENSE.md"><img src="http://img.shields.io/badge/license-MIT-orange.svg?style=flat-square"></a>
</p>
<p align="center">
  <a title="Users Discord" target="_blank" href="https://tur.so/discord"><img alt="Chat with other users of Turso on Discord" src="https://img.shields.io/discord/933071162680958986?label=Discord&logo=Discord&style=social"></a>
</p>

---

## About

This package is the Turso embedded database library for JavaScript in Node.

> **⚠️ Warning:** This software is ALPHA, only use for development, testing, and experimentation. We are working to make it production ready, but do not use it for critical data right now.

## Features

- **SQLite compatible:** SQLite query language and file format support ([status](https://github.com/tursodatabase/turso/blob/main/COMPAT.md)).
- **In-process**: No network overhead, runs directly in your Node.js process
- **TypeScript support**: Full TypeScript definitions included
- **Cross-platform**: Supports Linux (x86 and arm64), macOS, and Windows (browser support is available in the separate `@tursodatabase/database-browser` package)

## Installation

```bash
npm install @tursodatabase/database
```

## Getting Started

### In-Memory Database

```javascript
import { connect } from '@tursodatabase/database';

// Create an in-memory database
const db = await connect(':memory:');

// Create a table
await db.exec('CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT)');

// Insert data
const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
await insert.run('Alice', 'alice@example.com');
await insert.run('Bob', 'bob@example.com');

// Query data
const users = await db.prepare('SELECT * FROM users').all();
console.log(users);
// Output: [
//   { id: 1, name: 'Alice', email: 'alice@example.com' },
//   { id: 2, name: 'Bob', email: 'bob@example.com' }
// ]
```

### File-Based Database

```javascript
import { connect } from '@tursodatabase/database';

// Create or open a database file
const db = await connect('my-database.db');

// Create a table
await db.exec(`
  CREATE TABLE IF NOT EXISTS posts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    content TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )
`);

// Insert a post
const insertPost = db.prepare('INSERT INTO posts (title, content) VALUES (?, ?)');
const result = await insertPost.run('Hello World', 'This is my first blog post!');

console.log(`Inserted post with ID: ${result.lastInsertRowid}`);
```

### Transactions

```javascript
import { connect } from '@tursodatabase/database';

const db = await connect('transactions.db');

// Using transactions for atomic operations
const transaction = db.transaction(async (users) => {
  const insert = db.prepare('INSERT INTO users (name, email) VALUES (?, ?)');
  for (const user of users) {
    await insert.run(user.name, user.email);
  }
});

// Execute transaction
await transaction([
  { name: 'Alice', email: 'alice@example.com' },
  { name: 'Bob', email: 'bob@example.com' }
]);
```

## API Reference

For complete API documentation, see [JavaScript API Reference](../../../../docs/javascript-api-reference.md).

## Related Packages

* The [@tursodatabase/serverless](https://www.npmjs.com/package/@tursodatabase/serverless) package provides a serverless driver with the same API.
* The [@tursodatabase/sync](https://www.npmjs.com/package/@tursodatabase/sync) package provides bidirectional sync between a local Turso database and Turso Cloud.

## License

This project is licensed under the [MIT license](../../LICENSE.md).

## Support

- [GitHub Issues](https://github.com/tursodatabase/turso/issues)
- [Documentation](https://docs.turso.tech)
- [Discord Community](https://tur.so/discord)
@@ -8,13 +8,15 @@ export declare class Database {
   * # Arguments
   * * `path` - The path to the database file.
   */
  constructor(path: string)
  constructor(path: string, opts?: DatabaseOpts | undefined | null)
  /** Returns whether the database is in memory-only mode. */
  get memory(): boolean
  /** Returns the path to the database file. */
  get path(): string
  /** Returns whether the database connection is open. */
  get open(): boolean
  /**
   * Executes a batch of SQL statements.
   * Executes a batch of SQL statements on main thread
   *
   * # Arguments
   *
@@ -22,7 +24,17 @@ export declare class Database {
   *
   * # Returns
   */
  batch(sql: string): void
  batchSync(sql: string): void
  /**
   * Executes a batch of SQL statements outside of main thread
   *
   * # Arguments
   *
   * * `sql` - The SQL statements to execute.
   *
   * # Returns
   */
  batchAsync(sql: string): Promise<void>
  /**
   * Prepares a statement for execution.
   *
@@ -105,10 +117,15 @@ export declare class Statement {
   */
  bindAt(index: number, value: unknown): void
  /**
   * Step the statement and return result code:
   * Step the statement and return result code (executed on the main thread):
   * 1 = Row available, 2 = Done, 3 = I/O needed
   */
  step(): number
  stepSync(): number
  /**
   * Step the statement and return result code (executed on the background thread):
   * 1 = Row available, 2 = Done, 3 = I/O needed
   */
  stepAsync(): Promise<number>
  /** Get the current row data according to the presentation mode */
  row(): unknown
  /** Sets the presentation mode to raw. */
@@ -124,31 +141,32 @@ export declare class Statement {
   */
  safeIntegers(toggle?: boolean | undefined | null): void
  /** Get column information for the statement */
  columns(): unknown[]
  columns(): Promise<any>
  /** Finalizes the statement. */
  finalize(): void
}

export interface DatabaseOpts {
  tracing?: string
}
export declare class GeneratorHolder {
  resume(error?: string | undefined | null): number
  take(): GeneratorResponse | null
  resumeSync(error?: string | undefined | null): GeneratorResponse
  resumeAsync(error?: string | undefined | null): Promise<unknown>
}

export declare class JsDataCompletion {
  poison(err: string): void
  status(value: number): void
  push(value: Buffer): void
  pushBuffer(value: Buffer): void
  pushTransform(values: Array<DatabaseRowTransformResultJs>): void
  done(): void
}

export declare class JsDataPollResult {

}

export declare class JsProtocolIo {
  takeRequest(): JsProtocolRequestData | null
  takeRequest(): JsProtocolRequestBytes | null
}

export declare class JsProtocolRequestData {
export declare class JsProtocolRequestBytes {
  request(): JsProtocolRequest
  completion(): JsDataCompletion
}
@@ -159,13 +177,14 @@ export declare class SyncEngine {
  ioLoopSync(): void
  /** Runs the I/O loop asynchronously, returning a Promise. */
  ioLoopAsync(): Promise<void>
  protocolIo(): JsProtocolRequestData | null
  protocolIo(): JsProtocolRequestBytes | null
  sync(): GeneratorHolder
  push(): GeneratorHolder
  stats(): GeneratorHolder
  pull(): GeneratorHolder
  checkpoint(): GeneratorHolder
  open(): Database
  close(): void
}

export declare const enum DatabaseChangeTypeJs {
@@ -193,21 +212,29 @@ export interface DatabaseRowStatementJs {
  values: Array<any>
}

export type DatabaseRowTransformResultJs =
  | { type: 'Keep' }
  | { type: 'Skip' }
  | { type: 'Rewrite', stmt: DatabaseRowStatementJs }

export type GeneratorResponse =
  | { type: 'SyncEngineStats', operations: number, wal: number }
  | { type: 'IO' }
  | { type: 'Done' }
  | { type: 'SyncEngineStats', operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime?: number }

export type JsProtocolRequest =
  | { type: 'Http', method: string, path: string, body?: Array<number>, headers: Array<[string, string]> }
  | { type: 'FullRead', path: string }
  | { type: 'FullWrite', path: string, content: Array<number> }
  | { type: 'Transform', mutations: Array<DatabaseRowMutationJs> }

export interface SyncEngineOpts {
  path: string
  clientName?: string
  walPullBatchSize?: number
  enableTracing?: string
  tracing?: string
  tablesIgnore?: Array<string>
  transform?: (arg: DatabaseRowMutationJs) => DatabaseRowStatementJs | null
  useTransform: boolean
  protocolVersion?: SyncEngineProtocolVersion
}
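These declarations expose the raw generator protocol that `run()` in `sync-common` drives. A hand-rolled sketch of that loop, assuming an already-initialized `SyncEngine` named `engine` and omitting preemption and error handling:

```ts
// Mirrors run() from packages/common/run.ts in spirit, not a supported API.
const generator = engine.push();
for (;;) {
  const response = (await generator.resumeAsync(null)) as GeneratorResponse;
  if (response.type === 'Done') break;
  // Serve whatever protocol work the engine queued (Http, FullRead/FullWrite, Transform).
  for (let req = engine.protocolIo(); req != null; req = engine.protocolIo()) {
    const completion = req.completion();
    completion.poison('no protocol backend in this sketch'); // real code dispatches req.request()
  }
  await engine.ioLoopAsync();
}
```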
520
bindings/javascript/sync/packages/native/index.js
Normal file
@@ -0,0 +1,520 @@
// prettier-ignore
/* eslint-disable */
// @ts-nocheck
/* auto-generated by NAPI-RS */

import { createRequire } from 'node:module'
const require = createRequire(import.meta.url)
const __dirname = new URL('.', import.meta.url).pathname

const { readFileSync } = require('node:fs')
let nativeBinding = null
const loadErrors = []

const isMusl = () => {
  let musl = false
  if (process.platform === 'linux') {
    musl = isMuslFromFilesystem()
    if (musl === null) {
      musl = isMuslFromReport()
    }
    if (musl === null) {
      musl = isMuslFromChildProcess()
    }
  }
  return musl
}

const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')

const isMuslFromFilesystem = () => {
  try {
    return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
  } catch {
    return null
  }
}

const isMuslFromReport = () => {
  let report = null
  if (typeof process.report?.getReport === 'function') {
    process.report.excludeNetwork = true
    report = process.report.getReport()
  }
  if (!report) {
    return null
  }
  if (report.header && report.header.glibcVersionRuntime) {
    return false
  }
  if (Array.isArray(report.sharedObjects)) {
    if (report.sharedObjects.some(isFileMusl)) {
      return true
    }
  }
  return false
}

const isMuslFromChildProcess = () => {
  try {
    return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
  } catch (e) {
    // If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
    return false
  }
}

function requireNative() {
  if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
    try {
      nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
    } catch (err) {
      loadErrors.push(err)
    }
  } else if (process.platform === 'android') {
    if (process.arch === 'arm64') {
      try {
        return require('./sync.android-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-android-arm64')
        const bindingPackageVersion = require('@tursodatabase/sync-android-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./sync.android-arm-eabi.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-android-arm-eabi')
        const bindingPackageVersion = require('@tursodatabase/sync-android-arm-eabi/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
    }
  } else if (process.platform === 'win32') {
    if (process.arch === 'x64') {
      try {
        return require('./sync.win32-x64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-win32-x64-msvc')
        const bindingPackageVersion = require('@tursodatabase/sync-win32-x64-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'ia32') {
      try {
        return require('./sync.win32-ia32-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-win32-ia32-msvc')
        const bindingPackageVersion = require('@tursodatabase/sync-win32-ia32-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./sync.win32-arm64-msvc.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-win32-arm64-msvc')
        const bindingPackageVersion = require('@tursodatabase/sync-win32-arm64-msvc/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
    }
  } else if (process.platform === 'darwin') {
    try {
      return require('./sync.darwin-universal.node')
    } catch (e) {
      loadErrors.push(e)
    }
    try {
      const binding = require('@tursodatabase/sync-darwin-universal')
      const bindingPackageVersion = require('@tursodatabase/sync-darwin-universal/package.json').version
      if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
        throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
      }
      return binding
    } catch (e) {
      loadErrors.push(e)
    }
    if (process.arch === 'x64') {
      try {
        return require('./sync.darwin-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-darwin-x64')
        const bindingPackageVersion = require('@tursodatabase/sync-darwin-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./sync.darwin-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-darwin-arm64')
        const bindingPackageVersion = require('@tursodatabase/sync-darwin-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
    }
  } else if (process.platform === 'freebsd') {
    if (process.arch === 'x64') {
      try {
        return require('./sync.freebsd-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-freebsd-x64')
        const bindingPackageVersion = require('@tursodatabase/sync-freebsd-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm64') {
      try {
        return require('./sync.freebsd-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-freebsd-arm64')
        const bindingPackageVersion = require('@tursodatabase/sync-freebsd-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
    }
  } else if (process.platform === 'linux') {
    if (process.arch === 'x64') {
      if (isMusl()) {
        try {
          return require('./sync.linux-x64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-x64-musl')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-x64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-x64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-x64-gnu')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-x64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm64') {
      if (isMusl()) {
        try {
          return require('./sync.linux-arm64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm64-musl')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-arm64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm64-gnu')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm') {
      if (isMusl()) {
        try {
          return require('./sync.linux-arm-musleabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm-musleabihf')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm-musleabihf/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-arm-gnueabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-arm-gnueabihf')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-arm-gnueabihf/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'riscv64') {
      if (isMusl()) {
        try {
          return require('./sync.linux-riscv64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-riscv64-musl')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-riscv64-musl/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./sync.linux-riscv64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          const binding = require('@tursodatabase/sync-linux-riscv64-gnu')
          const bindingPackageVersion = require('@tursodatabase/sync-linux-riscv64-gnu/package.json').version
          if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
            throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
          }
          return binding
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'ppc64') {
      try {
        return require('./sync.linux-ppc64-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-linux-ppc64-gnu')
        const bindingPackageVersion = require('@tursodatabase/sync-linux-ppc64-gnu/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 's390x') {
      try {
        return require('./sync.linux-s390x-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-linux-s390x-gnu')
        const bindingPackageVersion = require('@tursodatabase/sync-linux-s390x-gnu/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
    }
  } else if (process.platform === 'openharmony') {
    if (process.arch === 'arm64') {
      try {
        return require('./sync.openharmony-arm64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-openharmony-arm64')
        const bindingPackageVersion = require('@tursodatabase/sync-openharmony-arm64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'x64') {
      try {
        return require('./sync.openharmony-x64.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-openharmony-x64')
        const bindingPackageVersion = require('@tursodatabase/sync-openharmony-x64/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./sync.openharmony-arm.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        const binding = require('@tursodatabase/sync-openharmony-arm')
        const bindingPackageVersion = require('@tursodatabase/sync-openharmony-arm/package.json').version
        if (bindingPackageVersion !== '0.1.5-pre.3' && process.env.NAPI_RS_ENFORCE_VERSION_CHECK && process.env.NAPI_RS_ENFORCE_VERSION_CHECK !== '0') {
          throw new Error(`Native binding package version mismatch, expected 0.1.5-pre.3 but got ${bindingPackageVersion}. You can reinstall dependencies to fix this issue.`)
        }
        return binding
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`))
    }
  } else {
    loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
  }
}

nativeBinding = requireNative()

if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
  try {
    nativeBinding = require('./sync.wasi.cjs')
  } catch (err) {
    if (process.env.NAPI_RS_FORCE_WASI) {
      loadErrors.push(err)
    }
  }
  if (!nativeBinding) {
    try {
      nativeBinding = require('@tursodatabase/sync-wasm32-wasi')
    } catch (err) {
      if (process.env.NAPI_RS_FORCE_WASI) {
        loadErrors.push(err)
      }
    }
  }
}

if (!nativeBinding) {
  if (loadErrors.length > 0) {
    throw new Error(
      `Cannot find native binding. ` +
      `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
      'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
      { cause: loadErrors }
    )
  }
  throw new Error(`Failed to load native binding`)
}

const { Database, Statement, GeneratorHolder, JsDataCompletion, JsProtocolIo, JsProtocolRequestBytes, SyncEngine, DatabaseChangeTypeJs, SyncEngineProtocolVersion } = nativeBinding
export { Database }
export { Statement }
export { GeneratorHolder }
export { JsDataCompletion }
export { JsProtocolIo }
export { JsProtocolRequestBytes }
export { SyncEngine }
export { DatabaseChangeTypeJs }
export { SyncEngineProtocolVersion }
53
bindings/javascript/sync/packages/native/package.json
Normal file
@@ -0,0 +1,53 @@
{
  "name": "@tursodatabase/sync",
  "version": "0.1.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "license": "MIT",
  "module": "./dist/promise.js",
  "main": "./dist/promise.js",
  "type": "module",
  "exports": {
    ".": "./dist/promise.js",
    "./compat": "./dist/compat.js"
  },
  "files": [
    "index.js",
    "dist/**",
    "README.md"
  ],
  "packageManager": "yarn@4.9.2",
  "devDependencies": {
    "@napi-rs/cli": "^3.1.5",
    "@types/node": "^24.3.1",
    "typescript": "^5.9.2",
    "vitest": "^3.2.4"
  },
  "scripts": {
    "napi-build": "napi build --platform --release --esm --manifest-path ../../Cargo.toml --output-dir .",
    "napi-dirs": "napi create-npm-dirs",
    "napi-artifacts": "napi artifacts --output-dir .",
    "tsc-build": "npm exec tsc",
    "build": "npm run napi-build && npm run tsc-build",
    "test": "vitest --run",
    "prepublishOnly": "npm run napi-dirs && npm run napi-artifacts && napi prepublish -t npm"
  },
  "napi": {
    "binaryName": "sync",
    "targets": [
      "x86_64-unknown-linux-gnu",
      "x86_64-pc-windows-msvc",
      "universal-apple-darwin",
      "aarch64-unknown-linux-gnu"
    ]
  },
  "dependencies": {
    "@tursodatabase/database-common": "^0.1.5",
    "@tursodatabase/sync-common": "^0.1.5"
  },
  "imports": {
    "#index": "./index.js"
  }
}
288
bindings/javascript/sync/packages/native/promise.test.ts
Normal file
288
bindings/javascript/sync/packages/native/promise.test.ts
Normal file
@@ -0,0 +1,288 @@
import { unlinkSync } from "node:fs";
import { expect, test } from 'vitest'
import { connect, DatabaseRowMutation, DatabaseRowTransformResult } from './promise.js'

const localeCompare = (a, b) => a.x.localeCompare(b.x);

test('select-after-push', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
        await db.exec("DELETE FROM t");
        await db.push();
        await db.close();
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("INSERT INTO t VALUES (1), (2), (3)");
        await db.push();
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        const rows = await db.prepare('SELECT * FROM t').all();
        expect(rows).toEqual([{ x: 1 }, { x: 2 }, { x: 3 }])
    }
})

test('select-without-push', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS t(x)");
        await db.exec("DELETE FROM t");
        await db.push();
        await db.close();
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("INSERT INTO t VALUES (1), (2), (3)");
    }
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        const rows = await db.prepare('SELECT * FROM t').all();
        expect(rows).toEqual([])
    }
})
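The two tests above pin down the offline-first contract: writes stay in the local replica until an explicit push(), and a fresh connection only sees what the remote already has. A sketch of a periodic background loop one might build on that contract (the interval and error handling are illustrative, not part of the API):

    import { Database } from "@tursodatabase/sync";

    function startBackgroundSync(db: Database, intervalMs = 5000): () => void {
        const timer = setInterval(async () => {
            try {
                await db.push(); // publish local mutations
                await db.pull(); // fold in remote mutations
            } catch (err) {
                console.warn("sync attempt failed, will retry", err);
            }
        }, intervalMs);
        return () => clearInterval(timer);
    }
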

test('merge-non-overlapping-keys', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2')");

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO q VALUES ('k3', 'value3'), ('k4', 'value4'), ('k5', 'value5')");

    await Promise.all([db1.push(), db2.push()]);
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM q').all();
    const rows2 = await db2.prepare('SELECT * FROM q').all();
    const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value3' }, { x: 'k4', y: 'value4' }, { x: 'k5', y: 'value5' }];
    expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
    expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

    await db2.push();
    await db1.push();
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM q').all();
    const rows2 = await db2.prepare('SELECT * FROM q').all();
    const expected = [{ x: 'k1', y: 'value1' }, { x: 'k2', y: 'value2' }, { x: 'k3', y: 'value5' }, { x: 'k4', y: 'value4' }];
    expect(rows1.sort(localeCompare)).toEqual(expected.sort(localeCompare))
    expect(rows2.sort(localeCompare)).toEqual(expected.sort(localeCompare))
})

test('last-push-wins-with-delete', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO q VALUES ('k1', 'value1'), ('k2', 'value2'), ('k4', 'value4')");
    await db1.exec("DELETE FROM q")

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO q VALUES ('k1', 'value3'), ('k2', 'value4'), ('k3', 'value5')");

    await db2.push();
    await db1.push();
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM q').all();
    const rows2 = await db2.prepare('SELECT * FROM q').all();
    const expected = [{ x: 'k3', y: 'value5' }];
    expect(rows1).toEqual(expected)
    expect(rows2).toEqual(expected)
})

test('constraint-conflict', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS u(x TEXT PRIMARY KEY, y UNIQUE)");
        await db.exec("DELETE FROM u");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db1.exec("INSERT INTO u VALUES ('k1', 'value1')");

    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    await db2.exec("INSERT INTO u VALUES ('k2', 'value1')");

    await db1.push();
    await expect(async () => await db2.push()).rejects.toThrow('SQLite error: UNIQUE constraint failed: u.y');
})
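As this test shows, a push that violates a remote constraint surfaces as a rejected promise rather than silently dropping rows, so the conflicting local write is still in the replica. A sketch of one recovery pattern, with an application-specific repair statement (the DELETE below is purely illustrative):

    import { Database } from "@tursodatabase/sync";

    async function pushResolvingConflicts(db: Database) {
        try {
            await db.push();
        } catch (err) {
            if (String(err).includes("UNIQUE constraint failed")) {
                // application-specific repair: drop or rewrite the conflicting row
                await db.exec("DELETE FROM u WHERE x = 'k2'");
                await db.push(); // retry once the conflict is resolved
            } else {
                throw err;
            }
        }
    }
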

test('checkpoint', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
    for (let i = 0; i < 1000; i++) {
        await db1.exec(`INSERT INTO q VALUES ('k${i}', 'v${i}')`);
    }
    expect((await db1.stats()).mainWal).toBeGreaterThan(4096 * 1000);
    await db1.checkpoint();
    expect((await db1.stats()).mainWal).toBe(0);
    let revertWal = (await db1.stats()).revertWal;
    expect(revertWal).toBeLessThan(4096 * 1000 / 100);

    for (let i = 0; i < 1000; i++) {
        await db1.exec(`UPDATE q SET y = 'u${i}' WHERE x = 'k${i}'`);
    }
    await db1.checkpoint();
    expect((await db1.stats()).revertWal).toBe(revertWal);
})
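stats() exposes the size of the main WAL and of the revert WAL (the log kept so a synced prefix can be rolled back), and checkpoint() folds the main WAL into the database file, which is what the test above asserts. A sketch of a size-based checkpoint policy; the 4 MiB threshold is an arbitrary illustration:

    import { Database } from "@tursodatabase/sync";

    const WAL_CHECKPOINT_THRESHOLD = 4 * 1024 * 1024; // illustrative

    async function maybeCheckpoint(db: Database) {
        const { mainWal } = await db.stats();
        if (mainWal > WAL_CHECKPOINT_THRESHOLD) {
            await db.checkpoint(); // resets mainWal to 0, keeps a small revert WAL
        }
    }
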

test('persistence', async () => {
    {
        const db = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL });
        await db.exec("CREATE TABLE IF NOT EXISTS q(x TEXT PRIMARY KEY, y)");
        await db.exec("DELETE FROM q");
        await db.push();
        await db.close();
    }
    const path = `test-${(Math.random() * 10000) | 0}.db`;
    try {
        {
            const db1 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            await db1.exec(`INSERT INTO q VALUES ('k1', 'v1')`);
            await db1.exec(`INSERT INTO q VALUES ('k2', 'v2')`);
            await db1.close();
        }

        {
            const db2 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            await db2.exec(`INSERT INTO q VALUES ('k3', 'v3')`);
            await db2.exec(`INSERT INTO q VALUES ('k4', 'v4')`);
            const rows = await db2.prepare('SELECT * FROM q').all();
            const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
            expect(rows).toEqual(expected)
            await db2.close();
        }

        {
            const db3 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            await db3.push();
            await db3.close();
        }

        {
            const db4 = await connect({ path: path, url: process.env.VITE_TURSO_DB_URL });
            const rows = await db4.prepare('SELECT * FROM q').all();
            const expected = [{ x: 'k1', y: 'v1' }, { x: 'k2', y: 'v2' }, { x: 'k3', y: 'v3' }, { x: 'k4', y: 'v4' }];
            expect(rows).toEqual(expected)
            await db4.close();
        }
    } finally {
        unlinkSync(path);
        unlinkSync(`${path}-wal`);
        unlinkSync(`${path}-info`);
        unlinkSync(`${path}-changes`);
        try { unlinkSync(`${path}-revert`) } catch (e) { }
    }
})

test('transform', async () => {
    {
        const db = await connect({
            path: ':memory:',
            url: process.env.VITE_TURSO_DB_URL,
        });
        await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
        await db.exec("DELETE FROM counter");
        await db.exec("INSERT INTO counter VALUES ('1', 0)")
        await db.push();
        await db.close();
    }
    const transform = (m: DatabaseRowMutation) => ({
        operation: 'rewrite',
        stmt: {
            sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
            values: [m.after.value - m.before.value, m.after.key]
        }
    } as DatabaseRowTransformResult);
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

    await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
    await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");

    await Promise.all([db1.push(), db2.push()]);
    await Promise.all([db1.pull(), db2.pull()]);

    const rows1 = await db1.prepare('SELECT * FROM counter').all();
    const rows2 = await db2.prepare('SELECT * FROM counter').all();
    expect(rows1).toEqual([{ key: '1', value: 2 }]);
    expect(rows2).toEqual([{ key: '1', value: 2 }]);
})

test('transform-many', async () => {
    {
        const db = await connect({
            path: ':memory:',
            url: process.env.VITE_TURSO_DB_URL,
        });
        await db.exec("CREATE TABLE IF NOT EXISTS counter(key TEXT PRIMARY KEY, value INTEGER)");
        await db.exec("DELETE FROM counter");
        await db.exec("INSERT INTO counter VALUES ('1', 0)")
        await db.push();
        await db.close();
    }
    const transform = (m: DatabaseRowMutation) => ({
        operation: 'rewrite',
        stmt: {
            sql: `UPDATE counter SET value = value + ? WHERE key = ?`,
            values: [m.after.value - m.before.value, m.after.key]
        }
    } as DatabaseRowTransformResult);
    const db1 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });
    const db2 = await connect({ path: ':memory:', url: process.env.VITE_TURSO_DB_URL, transform: transform });

    for (let i = 0; i < 1002; i++) {
        await db1.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
    }
    for (let i = 0; i < 1001; i++) {
        await db2.exec("UPDATE counter SET value = value + 1 WHERE key = '1'");
    }

    let start = performance.now();
    await Promise.all([db1.push(), db2.push()]);
    console.info('push', performance.now() - start);

    start = performance.now();
    await Promise.all([db1.pull(), db2.pull()]);
    console.info('pull', performance.now() - start);

    const rows1 = await db1.prepare('SELECT * FROM counter').all();
    const rows2 = await db2.prepare('SELECT * FROM counter').all();
    expect(rows1).toEqual([{ key: '1', value: 1001 + 1002 }]);
    expect(rows2).toEqual([{ key: '1', value: 1001 + 1002 }]);
})
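Under plain last-push-wins replication, two clients that each increment the same counter would overwrite one another. The transform hook fixes this by letting the client rewrite each outgoing row mutation into an equivalent commutative statement before it is replayed remotely, so both increments survive, as the two tests above verify. A sketch of the callback shape; the 'keep' spelling and the camelCased field names are assumptions by analogy with the 'rewrite' variant and napi naming conventions:

    import { DatabaseRowMutation, DatabaseRowTransformResult } from "@tursodatabase/sync";

    // Rewrite absolute counter updates into relative deltas; keep everything else.
    const transform = (m: DatabaseRowMutation): DatabaseRowTransformResult => {
        if (m.tableName !== "counter" || m.before == null || m.after == null) {
            return { operation: "keep" } as DatabaseRowTransformResult; // assumed variant spelling
        }
        return {
            operation: "rewrite",
            stmt: {
                sql: "UPDATE counter SET value = value + ? WHERE key = ?",
                values: [m.after.value - m.before.value, m.after.key],
            },
        } as DatabaseRowTransformResult;
    };
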
bindings/javascript/sync/packages/native/promise.ts (new file, 104 lines)
@@ -0,0 +1,104 @@
import { DatabasePromise, DatabaseOpts, NativeDatabase } from "@tursodatabase/database-common"
import { ProtocolIo, run, SyncOpts, RunOpts, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult } from "@tursodatabase/sync-common";
import { Database as NativeDB, SyncEngine } from "#index";
import { promises } from "node:fs";

let NodeIO: ProtocolIo = {
    async read(path: string): Promise<Buffer | Uint8Array | null> {
        try {
            return await promises.readFile(path);
        } catch (error) {
            if (error.code === 'ENOENT') {
                return null;
            }
            throw error;
        }
    },
    async write(path: string, data: Buffer | Uint8Array): Promise<void> {
        const unix = Math.floor(Date.now() / 1000);
        const nonce = Math.floor(Math.random() * 1000000000);
        const tmp = `${path}.tmp.${unix}.${nonce}`;
        await promises.writeFile(tmp, new Uint8Array(data));
        try {
            await promises.rename(tmp, path);
        } catch (err) {
            await promises.unlink(tmp);
            throw err;
        }
    }
};

function memoryIO(): ProtocolIo {
    let values = new Map();
    return {
        async read(path: string): Promise<Buffer | Uint8Array | null> {
            return values.get(path);
        },
        async write(path: string, data: Buffer | Uint8Array): Promise<void> {
            values.set(path, data);
        }
    }
};
class Database extends DatabasePromise {
    runOpts: RunOpts;
    engine: any;
    io: ProtocolIo;
    constructor(db: NativeDatabase, io: ProtocolIo, runOpts: RunOpts, engine: any, opts: DatabaseOpts = {}) {
        super(db, opts)
        this.runOpts = runOpts;
        this.engine = engine;
        this.io = io;
    }
    async sync() {
        await run(this.runOpts, this.io, this.engine, this.engine.sync());
    }
    async pull() {
        await run(this.runOpts, this.io, this.engine, this.engine.pull());
    }
    async push() {
        await run(this.runOpts, this.io, this.engine, this.engine.push());
    }
    async checkpoint() {
        await run(this.runOpts, this.io, this.engine, this.engine.checkpoint());
    }
    async stats(): Promise<{ operations: number, mainWal: number, revertWal: number, lastPullUnixTime: number, lastPushUnixTime: number | null }> {
        return (await run(this.runOpts, this.io, this.engine, this.engine.stats()));
    }
    override async close(): Promise<void> {
        this.engine.close();
    }
}

/**
 * Creates a new database connection asynchronously.
 *
 * @param {SyncOpts} opts - Connection options: the local `path` and remote `url`, plus
 *   optional `authToken`, `encryptionKey`, `clientName`, `tablesIgnore`, `transform` and `tracing`.
 * @returns {Promise<Database>} - A promise that resolves to a Database instance.
 */
async function connect(opts: SyncOpts): Promise<Database> {
    const engine = new SyncEngine({
        path: opts.path,
        clientName: opts.clientName,
        tablesIgnore: opts.tablesIgnore,
        useTransform: opts.transform != null,
        tracing: opts.tracing,
        protocolVersion: 1
    });
    const runOpts: RunOpts = {
        url: opts.url,
        headers: {
            ...(opts.authToken != null && { "Authorization": `Bearer ${opts.authToken}` }),
            ...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
        },
        preemptionMs: 1,
        transform: opts.transform,
    };
    let io = opts.path == ':memory:' ? memoryIO() : NodeIO;
    await run(runOpts, io, engine, engine.init());

    const nativeDb = engine.open();
    return new Database(nativeDb as any, io, runOpts, engine, {});
}

export { connect, Database, DatabaseRowMutation, DatabaseRowStatement, DatabaseRowTransformResult }
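ProtocolIo is the only filesystem the sync protocol sees for its metadata blobs: NodeIO above persists them with write-to-temp-then-rename so a crash never leaves a torn -info file, and memoryIO keeps them per-process for :memory: databases. A browser-flavored sketch of the same interface over localStorage, purely illustrative (the real browser build uses OPFS):

    import { ProtocolIo } from "@tursodatabase/sync-common";

    function localStorageIO(prefix = "turso-sync:"): ProtocolIo {
        return {
            async read(path: string): Promise<Uint8Array | null> {
                const hex = localStorage.getItem(prefix + path);
                if (hex == null) return null;
                return Uint8Array.from(hex.match(/../g) ?? [], (b) => parseInt(b, 16));
            },
            async write(path: string, data: Buffer | Uint8Array): Promise<void> {
                const hex = Array.from(new Uint8Array(data), (b) => b.toString(16).padStart(2, "0")).join("");
                localStorage.setItem(prefix + path, hex);
            },
        };
    }
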
bindings/javascript/sync/packages/native/tsconfig.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
  "compilerOptions": {
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "module": "nodenext",
    "target": "esnext",
    "outDir": "dist/",
    "lib": [
      "es2020"
    ],
    "paths": {
      "#index": [
        "./index.d.ts"
      ]
    }
  },
  "include": [
    "*"
  ]
}
bindings/javascript/sync/src/generator.rs (new file, 102 lines)
@@ -0,0 +1,102 @@
use napi::{bindgen_prelude::AsyncTask, Env, Task};
use napi_derive::napi;
use std::{
    future::Future,
    sync::{Arc, Mutex},
};

use turso_sync_engine::types::ProtocolCommand;

pub const GENERATOR_RESUME_IO: u32 = 0;
pub const GENERATOR_RESUME_DONE: u32 = 1;

pub trait Generator {
    fn resume(&mut self, result: Option<String>) -> napi::Result<GeneratorResponse>;
}

impl<F: Future<Output = turso_sync_engine::Result<()>>> Generator
    for genawaiter::sync::Gen<ProtocolCommand, turso_sync_engine::Result<()>, F>
{
    fn resume(&mut self, error: Option<String>) -> napi::Result<GeneratorResponse> {
        let result = match error {
            Some(err) => Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
                format!("JsProtocolIo error: {err}"),
            )),
            None => Ok(()),
        };
        match self.resume_with(result) {
            genawaiter::GeneratorState::Yielded(ProtocolCommand::IO) => Ok(GeneratorResponse::IO),
            genawaiter::GeneratorState::Complete(Ok(())) => Ok(GeneratorResponse::Done),
            genawaiter::GeneratorState::Complete(Err(err)) => Err(napi::Error::new(
                napi::Status::GenericFailure,
                format!("sync engine operation failed: {err}"),
            )),
        }
    }
}

#[napi(discriminant = "type")]
pub enum GeneratorResponse {
    IO,
    Done,
    SyncEngineStats {
        operations: i64,
        main_wal: i64,
        revert_wal: i64,
        last_pull_unix_time: i64,
        last_push_unix_time: Option<i64>,
    },
}

#[napi]
#[derive(Clone)]
pub struct GeneratorHolder {
    pub(crate) generator: Arc<Mutex<dyn Generator>>,
    pub(crate) response: Arc<Mutex<Option<GeneratorResponse>>>,
}

pub struct ResumeTask {
    holder: GeneratorHolder,
    error: Option<String>,
}

unsafe impl Send for ResumeTask {}

impl Task for ResumeTask {
    type Output = GeneratorResponse;
    type JsValue = GeneratorResponse;

    fn compute(&mut self) -> napi::Result<Self::Output> {
        resume_sync(&self.holder, self.error.take())
    }

    fn resolve(&mut self, _: Env, output: Self::Output) -> napi::Result<Self::JsValue> {
        Ok(output)
    }
}

fn resume_sync(holder: &GeneratorHolder, error: Option<String>) -> napi::Result<GeneratorResponse> {
    let result = holder.generator.lock().unwrap().resume(error)?;
    if let GeneratorResponse::Done = result {
        let response = holder.response.lock().unwrap().take();
        Ok(response.unwrap_or(GeneratorResponse::Done))
    } else {
        Ok(result)
    }
}

#[napi]
impl GeneratorHolder {
    #[napi]
    pub fn resume_sync(&self, error: Option<String>) -> napi::Result<GeneratorResponse> {
        resume_sync(self, error)
    }

    #[napi]
    pub fn resume_async(&self, error: Option<String>) -> napi::Result<AsyncTask<ResumeTask>> {
        Ok(AsyncTask::new(ResumeTask {
            holder: self.clone(),
            error,
        }))
    }
}
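Every engine operation reaches JS as a GeneratorHolder: each resume of the underlying Rust coroutine either completes or parks on ProtocolCommand::IO, meaning a protocol request is waiting to be serviced. With discriminant = "type", the response surfaces in JS as a tagged union. The driver below is a simplified sketch of the loop that @tursodatabase/sync-common's run() is responsible for; method and field names follow the #[napi] exports above after camelCasing:

    type GeneratorResponse =
        | { type: "IO" }
        | { type: "Done" }
        | { type: "SyncEngineStats"; operations: number; mainWal: number; revertWal: number;
            lastPullUnixTime: number; lastPushUnixTime: number | null };

    async function drive(holder: any, handleIo: () => Promise<void>): Promise<GeneratorResponse> {
        let error: string | null = null;
        while (true) {
            const resp: GeneratorResponse = await holder.resumeAsync(error);
            if (resp.type !== "IO") return resp; // Done, or a final payload such as stats
            try {
                await handleIo(); // service pending JsProtocolIo requests
                error = null;
            } catch (e) {
                error = String(e); // reported into the coroutine on the next resume
            }
        }
    }
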
bindings/javascript/sync/src/js_protocol_io.rs
@@ -7,7 +7,15 @@ use std::{

use napi::bindgen_prelude::*;
use napi_derive::napi;
use turso_sync_engine::protocol_io::{DataCompletion, DataPollResult, ProtocolIO};
use turso_sync_engine::{
    protocol_io::{DataCompletion, DataPollResult, ProtocolIO},
    types::{DatabaseRowTransformResult, DatabaseStatementReplay},
};

use crate::{
    core_change_type_to_js, core_values_map_to_js, js_value_to_core, DatabaseRowMutationJs,
    DatabaseRowTransformResultJs,
};

#[napi]
pub enum JsProtocolRequest {
@@ -24,15 +32,34 @@ pub enum JsProtocolRequest {
        path: String,
        content: Vec<u8>,
    },
    Transform {
        mutations: Vec<DatabaseRowMutationJs>,
    },
}

#[derive(Clone)]
#[napi]
pub struct JsDataCompletion(Arc<Mutex<JsDataCompletionInner>>);

pub struct JsBytesPollResult(Buffer);

impl DataPollResult<u8> for JsBytesPollResult {
    fn data(&self) -> &[u8] {
        &self.0
    }
}
pub struct JsTransformPollResult(Vec<DatabaseRowTransformResult>);

impl DataPollResult<DatabaseRowTransformResult> for JsTransformPollResult {
    fn data(&self) -> &[DatabaseRowTransformResult] {
        &self.0
    }
}

struct JsDataCompletionInner {
    status: Option<u16>,
    chunks: VecDeque<Buffer>,
    transformed: VecDeque<DatabaseRowTransformResult>,
    finished: bool,
    err: Option<String>,
}
@@ -49,8 +76,8 @@ impl JsDataCompletion {
    }
}

impl DataCompletion for JsDataCompletion {
    type DataPollResult = JsDataPollResult;
impl DataCompletion<u8> for JsDataCompletion {
    type DataPollResult = JsBytesPollResult;

    fn status(&self) -> turso_sync_engine::Result<Option<u16>> {
        let inner = self.inner()?;
@@ -60,7 +87,31 @@ impl DataCompletion for JsDataCompletion {
    fn poll_data(&self) -> turso_sync_engine::Result<Option<Self::DataPollResult>> {
        let mut inner = self.inner()?;
        let chunk = inner.chunks.pop_front();
        Ok(chunk.map(JsDataPollResult))
        Ok(chunk.map(JsBytesPollResult))
    }

    fn is_done(&self) -> turso_sync_engine::Result<bool> {
        let inner = self.inner()?;
        Ok(inner.finished)
    }
}

impl DataCompletion<DatabaseRowTransformResult> for JsDataCompletion {
    type DataPollResult = JsTransformPollResult;

    fn status(&self) -> turso_sync_engine::Result<Option<u16>> {
        let inner = self.inner()?;
        Ok(inner.status)
    }

    fn poll_data(&self) -> turso_sync_engine::Result<Option<Self::DataPollResult>> {
        let mut inner = self.inner()?;
        let chunk = inner.transformed.drain(..).collect::<Vec<_>>();
        if chunk.is_empty() {
            Ok(None)
        } else {
            Ok(Some(JsTransformPollResult(chunk)))
        }
    }

    fn is_done(&self) -> turso_sync_engine::Result<bool> {
@@ -84,11 +135,28 @@ impl JsDataCompletion {
    }

    #[napi]
    pub fn push(&self, value: Buffer) {
    pub fn push_buffer(&self, value: Buffer) {
        let mut completion = self.0.lock().unwrap();
        completion.chunks.push_back(value);
    }

    #[napi]
    pub fn push_transform(&self, values: Vec<DatabaseRowTransformResultJs>) {
        let mut completion = self.0.lock().unwrap();
        for value in values {
            completion.transformed.push_back(match value {
                DatabaseRowTransformResultJs::Keep => DatabaseRowTransformResult::Keep,
                DatabaseRowTransformResultJs::Skip => DatabaseRowTransformResult::Skip,
                DatabaseRowTransformResultJs::Rewrite { stmt } => {
                    DatabaseRowTransformResult::Rewrite(DatabaseStatementReplay {
                        sql: stmt.sql,
                        values: stmt.values.into_iter().map(js_value_to_core).collect(),
                    })
                }
            });
        }
    }

    #[napi]
    pub fn done(&self) {
        let mut completion = self.0.lock().unwrap();
@@ -97,22 +165,13 @@ impl JsDataCompletion {
}

#[napi]
pub struct JsDataPollResult(Buffer);

impl DataPollResult for JsDataPollResult {
    fn data(&self) -> &[u8] {
        &self.0
    }
}

#[napi]
pub struct JsProtocolRequestData {
pub struct JsProtocolRequestBytes {
    request: Arc<Mutex<Option<JsProtocolRequest>>>,
    completion: JsDataCompletion,
}

#[napi]
impl JsProtocolRequestData {
impl JsProtocolRequestBytes {
    #[napi]
    pub fn request(&self) -> JsProtocolRequest {
        let mut request = self.request.lock().unwrap();
@@ -125,7 +184,9 @@ impl JsProtocolRequestData {
}

impl ProtocolIO for JsProtocolIo {
    type DataCompletion = JsDataCompletion;
    type DataCompletionBytes = JsDataCompletion;
    type DataCompletionTransform = JsDataCompletion;

    fn http(
        &self,
        method: &str,
@@ -144,7 +205,7 @@ impl ProtocolIO for JsProtocolIo {
        }))
    }

    fn full_read(&self, path: &str) -> turso_sync_engine::Result<Self::DataCompletion> {
    fn full_read(&self, path: &str) -> turso_sync_engine::Result<Self::DataCompletionBytes> {
        Ok(self.add_request(JsProtocolRequest::FullRead {
            path: path.to_string(),
        }))
@@ -154,17 +215,37 @@ impl ProtocolIO for JsProtocolIo {
        &self,
        path: &str,
        content: Vec<u8>,
    ) -> turso_sync_engine::Result<Self::DataCompletion> {
    ) -> turso_sync_engine::Result<Self::DataCompletionBytes> {
        Ok(self.add_request(JsProtocolRequest::FullWrite {
            path: path.to_string(),
            content,
        }))
    }

    fn transform(
        &self,
        mutations: Vec<turso_sync_engine::types::DatabaseRowMutation>,
    ) -> turso_sync_engine::Result<Self::DataCompletionTransform> {
        Ok(self.add_request(JsProtocolRequest::Transform {
            mutations: mutations
                .into_iter()
                .map(|mutation| DatabaseRowMutationJs {
                    change_time: mutation.change_time as i64,
                    table_name: mutation.table_name,
                    id: mutation.id,
                    change_type: core_change_type_to_js(mutation.change_type),
                    before: mutation.before.map(core_values_map_to_js),
                    after: mutation.after.map(core_values_map_to_js),
                    updates: mutation.updates.map(core_values_map_to_js),
                })
                .collect(),
        }))
    }
}

#[napi]
pub struct JsProtocolIo {
    requests: Mutex<Vec<JsProtocolRequestData>>,
    requests: Mutex<Vec<JsProtocolRequestBytes>>,
}

impl Default for JsProtocolIo {
@@ -178,13 +259,14 @@ impl Default for JsProtocolIo {
#[napi]
impl JsProtocolIo {
    #[napi]
    pub fn take_request(&self) -> Option<JsProtocolRequestData> {
    pub fn take_request(&self) -> Option<JsProtocolRequestBytes> {
        self.requests.lock().unwrap().pop()
    }

    fn add_request(&self, request: JsProtocolRequest) -> JsDataCompletion {
        let completion = JsDataCompletionInner {
            chunks: VecDeque::new(),
            transformed: VecDeque::new(),
            finished: false,
            err: None,
            status: None,
@@ -192,7 +274,7 @@ impl JsProtocolIo {
        let completion = JsDataCompletion(Arc::new(Mutex::new(completion)));

        let mut requests = self.requests.lock().unwrap();
        requests.push(JsProtocolRequestData {
        requests.push(JsProtocolRequestBytes {
            request: Arc::new(Mutex::new(Some(request))),
            completion: completion.clone(),
        });
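On the JS side, each parked resume is answered by draining take_request() and completing it: FullRead/FullWrite requests feed bytes back through push_buffer()/done(), while Transform requests answer with push_transform(). A condensed sketch of that dispatcher; the real one lives in @tursodatabase/sync-common, the Http case is omitted, and the completion accessor's name is an assumption since the hunk above elides it:

    import { ProtocolIo } from "@tursodatabase/sync-common";

    async function serviceRequest(req: any, io: ProtocolIo) {
        const r = req.request();     // discriminated #[napi] enum: FullRead | FullWrite | Transform | Http
        const c = req.completion();  // assumed accessor for the JsDataCompletion
        switch (r.type) {
            case "FullRead": {
                const data = await io.read(r.path);
                if (data != null) c.pushBuffer(Buffer.from(data));
                c.done();
                break;
            }
            case "FullWrite":
                await io.write(r.path, Buffer.from(r.content));
                c.done();
                break;
            case "Transform":
                // answer with one DatabaseRowTransformResultJs per mutation
                c.pushTransform(r.mutations.map(() => ({ type: "Keep" })));
                c.done();
                break;
        }
    }
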
@@ -9,24 +9,18 @@ use std::{
    sync::{Arc, Mutex, OnceLock, RwLock, RwLockReadGuard, RwLockWriteGuard},
};

use napi::{
    bindgen_prelude::{AsyncTask, Either5, Function, FunctionRef, Null},
    Env,
};
use napi::bindgen_prelude::{AsyncTask, Either5, Null};
use napi_derive::napi;
use tracing_subscriber::{filter::LevelFilter, fmt::format::FmtSpan};
use turso_node::IoLoopTask;
use turso_sync_engine::{
    database_sync_engine::{DatabaseSyncEngine, DatabaseSyncEngineOpts},
    types::{
        Coro, DatabaseChangeType, DatabaseRowMutation, DatabaseRowStatement,
        DatabaseSyncEngineProtocolVersion,
    },
    types::{Coro, DatabaseChangeType, DatabaseSyncEngineProtocolVersion},
};

use crate::{
    generator::{GeneratorHolder, GeneratorResponse},
    js_protocol_io::{JsProtocolIo, JsProtocolRequestData},
    js_protocol_io::{JsProtocolIo, JsProtocolRequestBytes},
};

#[napi(object)]
@@ -41,10 +35,10 @@ pub struct SyncEngine {
    wal_pull_batch_size: u32,
    protocol_version: DatabaseSyncEngineProtocolVersion,
    tables_ignore: Vec<String>,
    transform: Option<FunctionRef<DatabaseRowMutationJs, Option<DatabaseRowStatementJs>>>,
    io: Arc<dyn turso_core::IO>,
    protocol: Arc<JsProtocolIo>,
    sync_engine: Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>>,
    use_transform: bool,
    io: Option<Arc<dyn turso_core::IO>>,
    protocol: Option<Arc<JsProtocolIo>>,
    sync_engine: Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>>,
    opened: Arc<Mutex<Option<turso_node::Database>>>,
}

@@ -116,19 +110,27 @@ pub struct DatabaseRowStatementJs {
    pub values: Vec<Either5<Null, i64, f64, String, Vec<u8>>>,
}

#[napi(discriminant = "type")]
#[derive(Debug)]
pub enum DatabaseRowTransformResultJs {
    Keep,
    Skip,
    Rewrite { stmt: DatabaseRowStatementJs },
}

#[napi(object, object_to_js = false)]
pub struct SyncEngineOpts {
    pub path: String,
    pub client_name: Option<String>,
    pub wal_pull_batch_size: Option<u32>,
    pub enable_tracing: Option<String>,
    pub tracing: Option<String>,
    pub tables_ignore: Option<Vec<String>>,
    pub transform: Option<Function<'static, DatabaseRowMutationJs, Option<DatabaseRowStatementJs>>>,
    pub use_transform: bool,
    pub protocol_version: Option<SyncEngineProtocolVersion>,
}

static TRACING_INIT: OnceLock<()> = OnceLock::new();
fn init_tracing(level_filter: LevelFilter) {
pub fn init_tracing(level_filter: LevelFilter) {
    TRACING_INIT.get_or_init(|| {
        tracing_subscriber::fmt()
            .with_ansi(false)
@@ -144,7 +146,7 @@ impl SyncEngine {
    #[napi(constructor)]
    pub fn new(opts: SyncEngineOpts) -> napi::Result<Self> {
        // helpful for local debugging
        match opts.enable_tracing.as_deref() {
        match opts.tracing.as_deref() {
            Some("info") => init_tracing(LevelFilter::INFO),
            Some("debug") => init_tracing(LevelFilter::DEBUG),
            Some("trace") => init_tracing(LevelFilter::TRACE),
@@ -154,23 +156,30 @@ impl SyncEngine {
        let io: Arc<dyn turso_core::IO> = if is_memory {
            Arc::new(turso_core::MemoryIO::new())
        } else {
            #[cfg(not(feature = "browser"))]
            {
                Arc::new(turso_core::PlatformIO::new().map_err(|e| {
                    napi::Error::new(
                        napi::Status::GenericFailure,
                        format!("Failed to create IO: {e}"),
                    )
                })?)
            }
            #[cfg(feature = "browser")]
            {
                Arc::new(turso_node::browser::Opfs::new()?)
            }
        };
        Ok(SyncEngine {
            path: opts.path,
            client_name: opts.client_name.unwrap_or("turso-sync-js".to_string()),
            wal_pull_batch_size: opts.wal_pull_batch_size.unwrap_or(100),
            tables_ignore: opts.tables_ignore.unwrap_or_default(),
            transform: opts.transform.map(|x| x.create_ref().unwrap()),
            use_transform: opts.use_transform,
            #[allow(clippy::arc_with_non_send_sync)]
            sync_engine: Arc::new(RwLock::new(None)),
            io,
            protocol: Arc::new(JsProtocolIo::default()),
            io: Some(io),
            protocol: Some(Arc::new(JsProtocolIo::default())),
            #[allow(clippy::arc_with_non_send_sync)]
            opened: Arc::new(Mutex::new(None)),
            protocol_version: match opts.protocol_version {
@@ -183,76 +192,40 @@ impl SyncEngine {
    }

    #[napi]
    pub fn init(&mut self, env: Env) -> GeneratorHolder {
        #[allow(clippy::type_complexity)]
        let transform: Option<
            Arc<
                dyn Fn(
                        &Env,
                        DatabaseRowMutation,
                    )
                        -> turso_sync_engine::Result<Option<DatabaseRowStatement>>
                    + 'static,
            >,
        > = match self.transform.take() {
            Some(f) => Some(Arc::new(move |env, mutation| {
                let result = f
                    .borrow_back(env)
                    .unwrap()
                    .call(DatabaseRowMutationJs {
                        change_time: mutation.change_time as i64,
                        table_name: mutation.table_name,
                        id: mutation.id,
                        change_type: core_change_type_to_js(mutation.change_type),
                        before: mutation.before.map(core_values_map_to_js),
                        after: mutation.after.map(core_values_map_to_js),
                        updates: mutation.updates.map(core_values_map_to_js),
                    })
                    .map_err(|e| {
                        turso_sync_engine::errors::Error::DatabaseSyncEngineError(format!(
                            "transform callback failed: {e}"
                        ))
                    })?;
                Ok(result.map(|statement| DatabaseRowStatement {
                    sql: statement.sql,
                    values: statement.values.into_iter().map(js_value_to_core).collect(),
                }))
            })),
            None => None,
        };
    pub fn init(&mut self) -> napi::Result<GeneratorHolder> {
        let opts = DatabaseSyncEngineOpts {
            client_name: self.client_name.clone(),
            wal_pull_batch_size: self.wal_pull_batch_size as u64,
            tables_ignore: self.tables_ignore.clone(),
            transform,
            use_transform: self.use_transform,
            protocol_version_hint: self.protocol_version,
        };

        let protocol = self.protocol.clone();
        let io = self.io()?;
        let protocol = self.protocol()?;
        let sync_engine = self.sync_engine.clone();
        let io = self.io.clone();
        let opened = self.opened.clone();
        let path = self.path.clone();
        let generator = genawaiter::sync::Gen::new(|coro| async move {
            let coro = Coro::new(env, coro);
            let coro = Coro::new((), coro);
            let initialized =
                DatabaseSyncEngine::new(&coro, io.clone(), protocol, &path, opts).await?;
            let connection = initialized.connect_rw(&coro).await?;
            let db = turso_node::Database::create(None, io.clone(), connection, false);
            let db = turso_node::Database::create(None, io.clone(), connection, path);

            *sync_engine.write().unwrap() = Some(initialized);
            *opened.lock().unwrap() = Some(db);
            Ok(())
        });
        GeneratorHolder {
            inner: Box::new(Mutex::new(generator)),
        Ok(GeneratorHolder {
            generator: Arc::new(Mutex::new(generator)),
            response: Arc::new(Mutex::new(None)),
        }
        })
    }

    #[napi]
    pub fn io_loop_sync(&self) -> napi::Result<()> {
        self.io.step().map_err(|e| {
        self.io()?.step().map_err(|e| {
            napi::Error::new(napi::Status::GenericFailure, format!("IO error: {e}"))
        })?;
        Ok(())
@@ -260,19 +233,19 @@ impl SyncEngine {

    /// Runs the I/O loop asynchronously, returning a Promise.
    #[napi(ts_return_type = "Promise<void>")]
    pub fn io_loop_async(&self) -> AsyncTask<IoLoopTask> {
        let io = self.io.clone();
        AsyncTask::new(IoLoopTask { io })
    pub fn io_loop_async(&self) -> napi::Result<AsyncTask<IoLoopTask>> {
        let io = self.io()?;
        Ok(AsyncTask::new(IoLoopTask { io }))
    }

    #[napi]
    pub fn protocol_io(&self) -> Option<JsProtocolRequestData> {
        self.protocol.take_request()
    pub fn protocol_io(&self) -> napi::Result<Option<JsProtocolRequestBytes>> {
        Ok(self.protocol()?.take_request())
    }

    #[napi]
    pub fn sync(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn sync(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let mut sync_engine = try_write(sync_engine)?;
            let sync_engine = try_unwrap_mut(&mut sync_engine)?;
            sync_engine.sync(coro).await?;
@@ -281,8 +254,8 @@ impl SyncEngine {
    }

    #[napi]
    pub fn push(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn push(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let sync_engine = try_read(sync_engine)?;
            let sync_engine = try_unwrap(&sync_engine)?;
            sync_engine.push_changes_to_remote(coro).await?;
@@ -291,38 +264,34 @@ impl SyncEngine {
    }

    #[napi]
    pub fn stats(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn stats(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let sync_engine = try_read(sync_engine)?;
            let sync_engine = try_unwrap(&sync_engine)?;
            let changes = sync_engine.stats(coro).await?;
            Ok(Some(GeneratorResponse::SyncEngineStats {
                operations: changes.cdc_operations,
                wal: changes.wal_size,
                main_wal: changes.main_wal_size as i64,
                revert_wal: changes.revert_wal_size as i64,
                last_pull_unix_time: changes.last_pull_unix_time,
                last_push_unix_time: changes.last_push_unix_time,
            }))
        })
    }

    #[napi]
    pub fn pull(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
            let changes = {
                let sync_engine = try_read(sync_engine)?;
                let sync_engine = try_unwrap(&sync_engine)?;
                sync_engine.wait_changes_from_remote(coro).await?
            };
            if let Some(changes) = changes {
    pub fn pull(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let mut sync_engine = try_write(sync_engine)?;
            let sync_engine = try_unwrap_mut(&mut sync_engine)?;
                sync_engine.apply_changes_from_remote(coro, changes).await?;
            }
            sync_engine.pull_changes_from_remote(coro).await?;
            Ok(None)
        })
    }

    #[napi]
    pub fn checkpoint(&self, env: Env) -> GeneratorHolder {
        self.run(env, async move |coro, sync_engine| {
    pub fn checkpoint(&self) -> GeneratorHolder {
        self.run(async move |coro, sync_engine| {
            let mut sync_engine = try_write(sync_engine)?;
            let sync_engine = try_unwrap_mut(&mut sync_engine)?;
            sync_engine.checkpoint(coro).await?;
@@ -342,12 +311,38 @@ impl SyncEngine {
        Ok(opened.clone())
    }

    #[napi]
    pub fn close(&mut self) {
        let _ = self.sync_engine.write().unwrap().take();
        let _ = self.opened.lock().unwrap().take().unwrap();
        let _ = self.io.take();
        let _ = self.protocol.take();
    }

    fn io(&self) -> napi::Result<Arc<dyn turso_core::IO>> {
        if self.io.is_none() {
            return Err(napi::Error::new(
                napi::Status::GenericFailure,
                "sync engine was closed",
            ));
        }
        Ok(self.io.as_ref().unwrap().clone())
    }
    fn protocol(&self) -> napi::Result<Arc<JsProtocolIo>> {
        if self.protocol.is_none() {
            return Err(napi::Error::new(
                napi::Status::GenericFailure,
                "sync engine was closed",
            ));
        }
        Ok(self.protocol.as_ref().unwrap().clone())
    }

    fn run(
        &self,
        env: Env,
        f: impl AsyncFnOnce(
            &Coro<Env>,
            &Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>>,
            &Coro<()>,
            &Arc<RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>>,
        ) -> turso_sync_engine::Result<Option<GeneratorResponse>>
        + 'static,
    ) -> GeneratorHolder {
@@ -357,21 +352,21 @@ impl SyncEngine {
        let generator = genawaiter::sync::Gen::new({
            let response = response.clone();
            |coro| async move {
                let coro = Coro::new(env, coro);
                let coro = Coro::new((), coro);
                *response.lock().unwrap() = f(&coro, &sync_engine).await?;
                Ok(())
            }
        });
        GeneratorHolder {
            inner: Box::new(Mutex::new(generator)),
            generator: Arc::new(Mutex::new(generator)),
            response,
        }
    }
}

fn try_read(
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>> {
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>> {
    let Ok(sync_engine) = sync_engine.try_read() else {
        let nasty_error = "sync_engine is busy".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
@@ -382,9 +377,8 @@ fn try_read(
}

fn try_write(
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>>
{
    sync_engine: &RwLock<Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>> {
    let Ok(sync_engine) = sync_engine.try_write() else {
        let nasty_error = "sync_engine is busy".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
@@ -395,8 +389,8 @@ fn try_write(
}

fn try_unwrap<'a>(
    sync_engine: &'a RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<&'a DatabaseSyncEngine<JsProtocolIo, Env>> {
    sync_engine: &'a RwLockReadGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<&'a DatabaseSyncEngine<JsProtocolIo>> {
    let Some(sync_engine) = sync_engine.as_ref() else {
        let error = "sync_engine must be initialized".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
@@ -407,8 +401,8 @@ fn try_unwrap<'a>(
}

fn try_unwrap_mut<'a>(
    sync_engine: &'a mut RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo, Env>>>,
) -> turso_sync_engine::Result<&'a mut DatabaseSyncEngine<JsProtocolIo, Env>> {
    sync_engine: &'a mut RwLockWriteGuard<'_, Option<DatabaseSyncEngine<JsProtocolIo>>>,
) -> turso_sync_engine::Result<&'a mut DatabaseSyncEngine<JsProtocolIo>> {
    let Some(sync_engine) = sync_engine.as_mut() else {
        let error = "sync_engine must be initialized".to_string();
        return Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
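Two JS-visible surface changes from the hunks above: the option was renamed from enableTracing to tracing, and close() now tears down io/protocol so later engine calls fail fast with "sync engine was closed". A sketch of both from the consumer side:

    import { connect } from "@tursodatabase/sync";

    const db = await connect({
        path: "local.db",
        url: process.env.TURSO_DATABASE_URL,
        tracing: "debug", // 'info' | 'debug' | 'trace', matching init_tracing above
    });
    await db.close();
    // any later engine call is expected to reject with "sync engine was closed"
    await db.push().catch((err) => console.warn(String(err)));
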
@@ -1394,12 +1394,21 @@ __metadata:
  languageName: node
  linkType: hard

"@tursodatabase/database-browser-common@npm:^0.1.5, @tursodatabase/database-browser-common@workspace:packages/browser-common":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/database-browser-common@workspace:packages/browser-common"
  dependencies:
    typescript: "npm:^5.9.2"
  languageName: unknown
  linkType: soft

"@tursodatabase/database-browser@workspace:packages/browser":
  version: 0.0.0-use.local
  resolution: "@tursodatabase/database-browser@workspace:packages/browser"
  dependencies:
    "@napi-rs/cli": "npm:^3.1.5"
    "@napi-rs/wasm-runtime": "npm:^1.0.3"
    "@tursodatabase/database-browser-common": "npm:^0.1.5"
    "@tursodatabase/database-common": "npm:^0.1.5"
    "@vitest/browser": "npm:^3.2.4"
    playwright: "npm:^1.55.0"

sync/engine/src/database_replay_generator.rs
@@ -10,9 +10,9 @@ use crate::{
    Result,
};

pub struct DatabaseReplayGenerator<Ctx = ()> {
pub struct DatabaseReplayGenerator {
    pub conn: Arc<turso_core::Connection>,
    pub opts: DatabaseReplaySessionOpts<Ctx>,
    pub opts: DatabaseReplaySessionOpts,
}

pub struct ReplayInfo {
@@ -24,8 +24,8 @@ pub struct ReplayInfo {
}

const SQLITE_SCHEMA_TABLE: &str = "sqlite_schema";
impl<Ctx> DatabaseReplayGenerator<Ctx> {
    pub fn new(conn: Arc<turso_core::Connection>, opts: DatabaseReplaySessionOpts<Ctx>) -> Self {
impl DatabaseReplayGenerator {
    pub fn new(conn: Arc<turso_core::Connection>, opts: DatabaseReplaySessionOpts) -> Self {
        Self { conn, opts }
    }
    pub fn create_mutation(
@@ -118,7 +118,7 @@ impl<Ctx> DatabaseReplayGenerator<Ctx> {
        }
        match change {
            DatabaseChangeType::Delete => {
                if self.opts.use_implicit_rowid {
                if self.opts.use_implicit_rowid || info.pk_column_indices.is_none() {
                    vec![turso_core::Value::Integer(id)]
                } else {
                    let mut values = Vec::new();
@@ -168,7 +168,7 @@ impl<Ctx> DatabaseReplayGenerator<Ctx> {
            }
        }
    }
    pub async fn replay_info(
    pub async fn replay_info<Ctx>(
        &self,
        coro: &Coro<Ctx>,
        change: &DatabaseTapeRowChange,
@@ -276,7 +276,7 @@ impl<Ctx> DatabaseReplayGenerator<Ctx> {
        }
    }
    pub(crate) async fn update_query(
    pub(crate) async fn update_query<Ctx>(
        &self,
        coro: &Coro<Ctx>,
        table_name: &str,
@@ -320,7 +320,7 @@ impl<Ctx> DatabaseReplayGenerator<Ctx> {
            is_ddl_replay: false,
        })
    }
    pub(crate) async fn insert_query(
    pub(crate) async fn insert_query<Ctx>(
        &self,
        coro: &Coro<Ctx>,
        table_name: &str,
@@ -371,7 +371,7 @@ impl<Ctx> DatabaseReplayGenerator<Ctx> {
            is_ddl_replay: false,
        })
    }
    pub(crate) async fn delete_query(
    pub(crate) async fn delete_query<Ctx>(
        &self,
        coro: &Coro<Ctx>,
        table_name: &str,
@@ -406,7 +406,7 @@ impl<Ctx> DatabaseReplayGenerator<Ctx> {
        })
    }

    async fn table_columns_info(
    async fn table_columns_info<Ctx>(
        &self,
        coro: &Coro<Ctx>,
        table_name: &str,

|
||||
use std::{
|
||||
cell::RefCell,
|
||||
collections::{HashMap, HashSet},
|
||||
sync::Arc,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use turso_core::OpenFlags;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
database_replay_generator::DatabaseReplayGenerator,
|
||||
database_sync_operations::{
|
||||
bootstrap_db_file, connect_untracked, count_local_changes, fetch_last_change_id, has_table,
|
||||
push_logical_changes, read_wal_salt, reset_wal_file, update_last_change_id, wait_full_body,
|
||||
wal_apply_from_file, wal_pull_to_file, PAGE_SIZE, WAL_FRAME_HEADER, WAL_FRAME_SIZE,
|
||||
acquire_slot, apply_transformation, bootstrap_db_file, connect_untracked,
|
||||
count_local_changes, fetch_last_change_id, has_table, push_logical_changes, read_wal_salt,
|
||||
reset_wal_file, update_last_change_id, wait_all_results, wal_apply_from_file,
|
||||
wal_pull_to_file, PAGE_SIZE, WAL_FRAME_HEADER, WAL_FRAME_SIZE,
|
||||
},
|
||||
database_tape::{
|
||||
DatabaseChangesIteratorMode, DatabaseChangesIteratorOpts, DatabaseReplaySession,
|
||||
@@ -23,34 +23,24 @@ use crate::{
|
||||
io_operations::IoOperations,
|
||||
protocol_io::ProtocolIO,
|
||||
types::{
|
||||
Coro, DatabaseMetadata, DatabasePullRevision, DatabaseSyncEngineProtocolVersion,
|
||||
DatabaseTapeOperation, DbChangesStatus, SyncEngineStats, Transform,
|
||||
Coro, DatabaseMetadata, DatabasePullRevision, DatabaseRowTransformResult,
|
||||
DatabaseSyncEngineProtocolVersion, DatabaseTapeOperation, DbChangesStatus, SyncEngineStats,
|
||||
DATABASE_METADATA_VERSION,
|
||||
},
|
||||
wal_session::WalSession,
|
||||
Result,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DatabaseSyncEngineOpts<Ctx> {
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct DatabaseSyncEngineOpts {
|
||||
pub client_name: String,
|
||||
pub tables_ignore: Vec<String>,
|
||||
pub transform: Option<Transform<Ctx>>,
|
||||
pub use_transform: bool,
|
||||
pub wal_pull_batch_size: u64,
|
||||
pub protocol_version_hint: DatabaseSyncEngineProtocolVersion,
|
||||
}
|
||||
|
||||
impl<Ctx> std::fmt::Debug for DatabaseSyncEngineOpts<Ctx> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("DatabaseSyncEngineOpts")
|
||||
.field("client_name", &self.client_name)
|
||||
.field("tables_ignore", &self.tables_ignore)
|
||||
.field("transform.is_some()", &self.transform.is_some())
|
||||
.field("wal_pull_batch_size", &self.wal_pull_batch_size)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DatabaseSyncEngine<P: ProtocolIO, Ctx> {
|
||||
pub struct DatabaseSyncEngine<P: ProtocolIO> {
|
||||
io: Arc<dyn turso_core::IO>,
|
||||
protocol: Arc<P>,
|
||||
db_file: Arc<dyn turso_core::DatabaseStorage>,
|
||||
@@ -59,7 +49,8 @@ pub struct DatabaseSyncEngine<P: ProtocolIO, Ctx> {
|
||||
revert_db_wal_path: String,
|
||||
main_db_path: String,
|
||||
meta_path: String,
|
||||
opts: DatabaseSyncEngineOpts<Ctx>,
|
||||
changes_file: Arc<Mutex<Option<Arc<dyn turso_core::File>>>>,
|
||||
opts: DatabaseSyncEngineOpts,
|
||||
meta: RefCell<DatabaseMetadata>,
|
||||
client_unique_id: String,
|
||||
}
|
||||
@@ -68,18 +59,19 @@ fn db_size_from_page(page: &[u8]) -> u32 {
|
||||
u32::from_be_bytes(page[28..28 + 4].try_into().unwrap())
|
||||
}
|
||||
|
||||
impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
impl<P: ProtocolIO> DatabaseSyncEngine<P> {
|
||||
/// Creates new instance of SyncEngine and initialize it immediately if no consistent local data exists
|
||||
pub async fn new(
|
||||
pub async fn new<Ctx>(
|
||||
coro: &Coro<Ctx>,
|
||||
io: Arc<dyn turso_core::IO>,
|
||||
protocol: Arc<P>,
|
||||
main_db_path: &str,
|
||||
opts: DatabaseSyncEngineOpts<Ctx>,
|
||||
opts: DatabaseSyncEngineOpts,
|
||||
) -> Result<Self> {
|
||||
let main_db_wal_path = format!("{main_db_path}-wal");
|
||||
let revert_db_wal_path = format!("{main_db_path}-wal-revert");
|
||||
let meta_path = format!("{main_db_path}-info");
|
||||
let changes_path = format!("{main_db_path}-changes");
|
||||
|
||||
let db_file = io.open_file(main_db_path, turso_core::OpenFlags::Create, false)?;
|
||||
let db_file = Arc::new(turso_core::storage::database::DatabaseFile::new(db_file));
|
||||
@@ -87,7 +79,7 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
tracing::info!("init(path={}): opts={:?}", main_db_path, opts);
|
||||
|
||||
let completion = protocol.full_read(&meta_path)?;
|
||||
let data = wait_full_body(coro, &completion).await?;
|
||||
let data = wait_all_results(coro, &completion).await?;
|
||||
let meta = if data.is_empty() {
|
||||
None
|
||||
} else {
|
||||
@@ -107,21 +99,33 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
)
|
||||
.await?;
|
||||
let meta = DatabaseMetadata {
|
||||
version: DATABASE_METADATA_VERSION.to_string(),
|
||||
client_unique_id,
|
||||
synced_revision: Some(revision),
|
||||
revert_since_wal_salt: None,
|
||||
revert_since_wal_watermark: 0,
|
||||
last_pushed_change_id_hint: 0,
|
||||
last_pushed_pull_gen_hint: 0,
|
||||
last_pull_unix_time: io.now().secs,
|
||||
last_push_unix_time: None,
|
||||
};
|
||||
tracing::info!("write meta after successful bootstrap: meta={meta:?}");
|
||||
let completion = protocol.full_write(&meta_path, meta.dump()?)?;
|
||||
// todo: what happen if we will actually update the metadata on disk but fail and so in memory state will not be updated
|
||||
wait_full_body(coro, &completion).await?;
|
||||
wait_all_results(coro, &completion).await?;
|
||||
meta
|
||||
}
|
||||
};
|
||||
|
||||
if meta.version != DATABASE_METADATA_VERSION {
|
||||
return Err(Error::DatabaseSyncEngineError(format!(
|
||||
"unsupported metadata version: {}",
|
||||
meta.version
|
||||
)));
|
||||
}
|
||||
|
||||
tracing::info!("check if main db file exists");
|
||||
|
||||
let main_exists = io.try_open(main_db_path)?.is_some();
|
||||
if !main_exists {
|
||||
let error = "main DB file doesn't exists, but metadata is".to_string();
|
||||
@@ -140,8 +144,9 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
cdc_table: None,
|
||||
cdc_mode: Some("full".to_string()),
|
||||
};
|
||||
let main_tape = DatabaseTape::new_with_opts(main_db, tape_opts);
|
||||
tracing::info!("initialize database tape connection: path={}", main_db_path);
|
||||
let main_tape = DatabaseTape::new_with_opts(main_db, tape_opts);
|
||||
let changes_file = io.open_file(&changes_path, OpenFlags::Create, false)?;
|
||||
let mut db = Self {
|
||||
io,
|
||||
protocol,
|
||||
@@ -151,6 +156,7 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
revert_db_wal_path,
|
||||
main_db_path: main_db_path.to_string(),
|
||||
meta_path: format!("{main_db_path}-info"),
|
||||
changes_file: Arc::new(Mutex::new(Some(changes_file))),
|
||||
opts,
|
||||
meta: RefCell::new(meta.clone()),
|
||||
client_unique_id: meta.client_unique_id.clone(),
|
||||
@@ -163,10 +169,10 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
} = synced_revision
|
||||
{
|
||||
// sync WAL from the remote in case of bootstrap - all subsequent initializations will be fast
|
||||
if let Some(changes) = db.wait_changes_from_remote(coro).await? {
|
||||
db.apply_changes_from_remote(coro, changes).await?;
|
||||
}
|
||||
db.pull_changes_from_remote(coro).await?;
|
||||
}
|
||||
|
||||
tracing::info!("sync engine was initialized");
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
@@ -184,7 +190,10 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
Ok(conn)
|
||||
}
|
||||
|
||||
async fn checkpoint_passive(&mut self, coro: &Coro<Ctx>) -> Result<(Option<Vec<u32>>, u64)> {
|
||||
async fn checkpoint_passive<Ctx>(
|
||||
&mut self,
|
||||
coro: &Coro<Ctx>,
|
||||
) -> Result<(Option<Vec<u32>>, u64)> {
|
||||
let watermark = self.meta().revert_since_wal_watermark;
|
||||
tracing::info!(
|
||||
"checkpoint(path={:?}): revert_since_wal_watermark={}",
|
||||
@@ -232,16 +241,32 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
|
||||
Ok((main_wal_salt, watermark))
|
||||
}
|
||||
|
||||
pub async fn stats(&self, coro: &Coro<Ctx>) -> Result<SyncEngineStats> {
|
||||
pub async fn stats<Ctx>(&self, coro: &Coro<Ctx>) -> Result<SyncEngineStats> {
|
||||
let main_conn = connect_untracked(&self.main_tape)?;
|
||||
let change_id = self.meta().last_pushed_change_id_hint;
|
||||
let last_pull_unix_time = self.meta().last_pull_unix_time;
|
||||
let last_push_unix_time = self.meta().last_push_unix_time;
|
||||
let revert_wal_path = &self.revert_db_wal_path;
|
||||
let revert_wal_file = self
|
||||
.io
|
||||
.open_file(revert_wal_path, OpenFlags::all(), false)?;
|
||||
let revert_wal_size = revert_wal_file.size()?;
|
||||
let main_wal_frames = main_conn.wal_state()?.max_frame;
|
||||
let main_wal_size = if main_wal_frames == 0 {
|
||||
0
|
||||
} else {
|
||||
WAL_FRAME_HEADER as u64 + WAL_FRAME_SIZE as u64 * main_wal_frames
|
||||
};
|
||||
Ok(SyncEngineStats {
|
||||
cdc_operations: count_local_changes(coro, &main_conn, change_id).await?,
|
||||
wal_size: main_conn.wal_state()?.max_frame as i64,
|
||||
main_wal_size,
|
||||
revert_wal_size,
|
||||
last_pull_unix_time,
|
||||
last_push_unix_time,
|
||||
})
|
||||
}
|
||||

pub async fn checkpoint(&mut self, coro: &Coro<Ctx>) -> Result<()> {
pub async fn checkpoint<Ctx>(&mut self, coro: &Coro<Ctx>) -> Result<()> {
let (main_wal_salt, watermark) = self.checkpoint_passive(coro).await?;

let main_conn = connect_untracked(&self.main_tape)?;
@@ -341,36 +366,32 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
Ok(())
}

pub async fn wait_changes_from_remote(
&self,
coro: &Coro<Ctx>,
) -> Result<Option<DbChangesStatus>> {
let file_path = format!("{}-frames-{}", self.main_db_path, Uuid::new_v4());
tracing::info!(
"wait_changes(path={}): file_path={}",
self.main_db_path,
file_path
);
let file = self.io.create(&file_path)?;
pub async fn wait_changes_from_remote<Ctx>(&self, coro: &Coro<Ctx>) -> Result<DbChangesStatus> {
tracing::info!("wait_changes(path={})", self.main_db_path);

let file = acquire_slot(&self.changes_file)?;

let now = self.io.now();
let revision = self.meta().synced_revision.clone().unwrap();
let next_revision = wal_pull_to_file(
coro,
self.protocol.as_ref(),
file.clone(),
&file.value,
&revision,
self.opts.wal_pull_batch_size,
)
.await?;

if file.size()? == 0 {
if file.value.size()? == 0 {
tracing::info!(
"wait_changes(path={}): no changes detected, removing changes file {}",
self.main_db_path,
file_path
"wait_changes(path={}): no changes detected",
self.main_db_path
);
self.io.remove_file(&file_path)?;
return Ok(None);
return Ok(DbChangesStatus {
time: now,
revision: next_revision,
file_slot: None,
});
}

tracing::info!(
@@ -380,26 +401,25 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
next_revision
);

Ok(Some(DbChangesStatus {
Ok(DbChangesStatus {
time: now,
revision: next_revision,
file_path,
}))
file_slot: Some(file),
})
}

/// Sync all new changes from remote DB and apply them locally
/// This method will **not** send local changes to the remote
/// This method will block writes for the period of pull
pub async fn apply_changes_from_remote(
pub async fn apply_changes_from_remote<Ctx>(
&mut self,
coro: &Coro<Ctx>,
remote_changes: DbChangesStatus,
now: turso_core::Instant,
) -> Result<()> {
let pull_result = self.apply_changes_internal(coro, &remote_changes).await;
let cleanup_result: Result<()> = self
.io
.remove_file(&remote_changes.file_path)
.inspect_err(|e| tracing::error!("failed to cleanup changes file: {e}"))
.map_err(|e| e.into());
assert!(remote_changes.file_slot.is_some(), "file_slot must be set");
let changes_file = remote_changes.file_slot.as_ref().unwrap().value.clone();
let pull_result = self.apply_changes_internal(coro, &changes_file).await;
let Ok(revert_since_wal_watermark) = pull_result else {
return Err(pull_result.err().unwrap());
};
@@ -411,34 +431,24 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
)?;
reset_wal_file(coro, revert_wal_file, 0).await?;

self.update_meta(coro, |meta| {
meta.revert_since_wal_watermark = revert_since_wal_watermark;
meta.synced_revision = Some(remote_changes.revision);
meta.last_pushed_change_id_hint = 0;
self.update_meta(coro, |m| {
m.revert_since_wal_watermark = revert_since_wal_watermark;
m.synced_revision = Some(remote_changes.revision);
m.last_pushed_change_id_hint = 0;
m.last_pull_unix_time = now.secs;
})
.await?;

cleanup_result
Ok(())
}
async fn apply_changes_internal(
async fn apply_changes_internal<Ctx>(
&mut self,
coro: &Coro<Ctx>,
remote_changes: &DbChangesStatus,
changes_file: &Arc<dyn turso_core::File>,
) -> Result<u64> {
tracing::info!(
"apply_changes(path={}, changes={:?})",
self.main_db_path,
remote_changes
);
tracing::info!("apply_changes(path={})", self.main_db_path);

let (_, watermark) = self.checkpoint_passive(coro).await?;

let changes_file = self.io.open_file(
&remote_changes.file_path,
turso_core::OpenFlags::empty(),
false,
)?;

let revert_conn = self.open_revert_db_conn()?;
let main_conn = connect_untracked(&self.main_tape)?;

@@ -476,6 +486,9 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
let mut iterator = self.main_tape.iterate_changes(iterate_opts)?;
while let Some(operation) = iterator.next(coro).await? {
match operation {
DatabaseTapeOperation::StmtReplay(_) => {
panic!("changes iterator must not use StmtReplay option")
}
DatabaseTapeOperation::RowChange(change) => local_changes.push(change),
DatabaseTapeOperation::Commit => continue,
}
@@ -553,18 +566,46 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
cached_insert_stmt: HashMap::new(),
cached_update_stmt: HashMap::new(),
in_txn: true,
generator: DatabaseReplayGenerator::<Ctx> {
generator: DatabaseReplayGenerator {
conn: main_conn.clone(),
opts: DatabaseReplaySessionOpts::<Ctx> {
opts: DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: self.opts.transform.clone(),
},
},
};
for change in local_changes {
let operation = DatabaseTapeOperation::RowChange(change);

let mut transformed = if self.opts.use_transform {
Some(
apply_transformation(
&coro,
self.protocol.as_ref(),
&local_changes,
&replay.generator,
)
.await?,
)
} else {
None
};

assert!(!replay.conn().get_auto_commit());
for (i, change) in local_changes.into_iter().enumerate() {
let operation = if let Some(transformed) = &mut transformed {
match std::mem::replace(&mut transformed[i], DatabaseRowTransformResult::Skip) {
DatabaseRowTransformResult::Keep => {
DatabaseTapeOperation::RowChange(change)
}
DatabaseRowTransformResult::Skip => continue,
DatabaseRowTransformResult::Rewrite(replay) => {
DatabaseTapeOperation::StmtReplay(replay)
}
}
} else {
DatabaseTapeOperation::RowChange(change)
};
replay.replay(coro, operation).await?;
}
assert!(!replay.conn().get_auto_commit());

main_session.wal_session.end(true)?;
}
@@ -575,7 +616,7 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
/// Sync local changes to remote DB
/// This method will **not** pull remote changes to the local DB
/// This method will **not** block writes for the period of sync
pub async fn push_changes_to_remote(&self, coro: &Coro<Ctx>) -> Result<()> {
pub async fn push_changes_to_remote<Ctx>(&self, coro: &Coro<Ctx>) -> Result<()> {
tracing::info!("push_changes(path={})", self.main_db_path);

let (_, change_id) = push_logical_changes(
@@ -589,6 +630,7 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {

self.update_meta(coro, |m| {
m.last_pushed_change_id_hint = change_id;
m.last_push_unix_time = Some(self.io.now().secs);
})
.await?;

@@ -596,7 +638,7 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
}

/// Create read/write database connection and appropriately configure it before use
pub async fn connect_rw(&self, coro: &Coro<Ctx>) -> Result<Arc<turso_core::Connection>> {
pub async fn connect_rw<Ctx>(&self, coro: &Coro<Ctx>) -> Result<Arc<turso_core::Connection>> {
let conn = self.main_tape.connect(coro).await?;
conn.wal_auto_checkpoint_disable();
Ok(conn)
@@ -604,12 +646,24 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {

/// Sync local changes to remote DB and bring new changes from remote to local
/// This method will block writes for the period of sync
pub async fn sync(&mut self, coro: &Coro<Ctx>) -> Result<()> {
pub async fn sync<Ctx>(&mut self, coro: &Coro<Ctx>) -> Result<()> {
// todo(sivukhin): this is a bit suboptimal as both 'push' and 'pull' will call pull_synced_from_remote
// but for now - keep it simple
self.push_changes_to_remote(coro).await?;
if let Some(changes) = self.wait_changes_from_remote(coro).await? {
self.apply_changes_from_remote(coro, changes).await?;
self.pull_changes_from_remote(coro).await?;
Ok(())
}

pub async fn pull_changes_from_remote<Ctx>(&mut self, coro: &Coro<Ctx>) -> Result<()> {
let now = self.io.now();
let changes = self.wait_changes_from_remote(coro).await?;
if changes.file_slot.is_some() {
self.apply_changes_from_remote(coro, changes, now).await?;
} else {
self.update_meta(coro, |m| {
m.last_pull_unix_time = now.secs;
})
.await?;
}
Ok(())
}
@@ -618,7 +672,7 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
self.meta.borrow()
}

async fn update_meta(
async fn update_meta<Ctx>(
&self,
coro: &Coro<Ctx>,
update: impl FnOnce(&mut DatabaseMetadata),
@@ -628,7 +682,7 @@ impl<P: ProtocolIO, Ctx> DatabaseSyncEngine<P, Ctx> {
tracing::info!("update_meta: {meta:?}");
let completion = self.protocol.full_write(&self.meta_path, meta.dump()?)?;
// todo: what happens if we actually update the metadata on disk but then fail, so the in-memory state is not updated?
wait_full_body(coro, &completion).await?;
wait_all_results(coro, &completion).await?;
self.meta.replace(meta);
Ok(())
}

@@ -1,4 +1,4 @@
use std::sync::Arc;
use std::sync::{Arc, Mutex};

use bytes::BytesMut;
use prost::Message;
@@ -22,8 +22,9 @@ use crate::{
PullUpdatesRespProtoBody, Stmt, StmtResult, StreamRequest,
},
types::{
Coro, DatabasePullRevision, DatabaseSyncEngineProtocolVersion, DatabaseTapeOperation,
DatabaseTapeRowChangeType, DbSyncInfo, DbSyncStatus, ProtocolCommand,
Coro, DatabasePullRevision, DatabaseRowTransformResult, DatabaseSyncEngineProtocolVersion,
DatabaseTapeOperation, DatabaseTapeRowChange, DatabaseTapeRowChangeType, DbSyncInfo,
DbSyncStatus, ProtocolCommand,
},
wal_session::WalSession,
Result,
@@ -34,7 +35,30 @@ pub const WAL_FRAME_HEADER: usize = 24;
pub const PAGE_SIZE: usize = 4096;
pub const WAL_FRAME_SIZE: usize = WAL_FRAME_HEADER + PAGE_SIZE;

enum WalHttpPullResult<C: DataCompletion> {
pub struct MutexSlot<T: Clone> {
pub value: T,
pub slot: Arc<Mutex<Option<T>>>,
}

impl<T: Clone> Drop for MutexSlot<T> {
fn drop(&mut self) {
self.slot.lock().unwrap().replace(self.value.clone());
}
}

pub(crate) fn acquire_slot<T: Clone>(slot: &Arc<Mutex<Option<T>>>) -> Result<MutexSlot<T>> {
let Some(value) = slot.lock().unwrap().take() else {
return Err(Error::DatabaseSyncEngineError(
"changes file already acquired by another operation".to_string(),
));
};
Ok(MutexSlot {
value,
slot: slot.clone(),
})
}
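MutexSlot/acquire_slot implement a small RAII hand-off: taking the value out of a shared Arc<Mutex<Option<T>>> marks it busy, and Drop puts it back so the next acquire succeeds. A self-contained sketch of the same pattern, with a plain String error standing in for the crate's Error::DatabaseSyncEngineError:

use std::sync::{Arc, Mutex};

struct Slot<T: Clone> {
    value: T,
    slot: Arc<Mutex<Option<T>>>,
}

impl<T: Clone> Drop for Slot<T> {
    fn drop(&mut self) {
        // return the value so the next acquire() succeeds
        self.slot.lock().unwrap().replace(self.value.clone());
    }
}

fn acquire<T: Clone>(slot: &Arc<Mutex<Option<T>>>) -> Result<Slot<T>, String> {
    let Some(value) = slot.lock().unwrap().take() else {
        return Err("slot already acquired by another operation".to_string());
    };
    Ok(Slot { value, slot: slot.clone() })
}

fn main() {
    let shared = Arc::new(Mutex::new(Some("changes-file".to_string())));
    let guard = acquire(&shared).unwrap();
    assert!(acquire(&shared).is_err()); // busy while the guard lives
    drop(guard);
    assert!(acquire(&shared).is_ok()); // released on drop
}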

enum WalHttpPullResult<C: DataCompletion<u8>> {
Frames(C),
NeedCheckpoint(DbSyncStatus),
}
@@ -56,10 +80,10 @@ pub async fn db_bootstrap<C: ProtocolIO, Ctx>(
client: &C,
db: Arc<dyn turso_core::File>,
) -> Result<DbSyncInfo> {
tracing::debug!("db_bootstrap");
tracing::info!("db_bootstrap");
let start_time = std::time::Instant::now();
let db_info = db_info_http(coro, client).await?;
tracing::debug!("db_bootstrap: fetched db_info={db_info:?}");
tracing::info!("db_bootstrap: fetched db_info={db_info:?}");
let content = db_bootstrap_http(coro, client, db_info.current_generation).await?;
let mut pos = 0;
loop {
@@ -99,21 +123,21 @@ pub async fn db_bootstrap<C: ProtocolIO, Ctx>(
}

let elapsed = std::time::Instant::now().duration_since(start_time);
tracing::debug!("db_bootstrap: finished: bytes={pos}, elapsed={:?}", elapsed);
tracing::info!("db_bootstrap: finished: bytes={pos}, elapsed={:?}", elapsed);

Ok(db_info)
}

pub async fn wal_apply_from_file<Ctx>(
coro: &Coro<Ctx>,
frames_file: Arc<dyn turso_core::File>,
frames_file: &Arc<dyn turso_core::File>,
session: &mut DatabaseWalSession,
) -> Result<u32> {
let size = frames_file.size()?;
assert!(size % WAL_FRAME_SIZE as u64 == 0);
#[allow(clippy::arc_with_non_send_sync)]
let buffer = Arc::new(Buffer::new_temporary(WAL_FRAME_SIZE));
tracing::debug!("wal_apply_from_file: size={}", size);
tracing::info!("wal_apply_from_file: size={}", size);
let mut db_size = 0;
for offset in (0..size).step_by(WAL_FRAME_SIZE) {
let c = Completion::new_read(buffer.clone(), move |result| {
@@ -139,10 +163,21 @@ pub async fn wal_apply_from_file<Ctx>(
pub async fn wal_pull_to_file<C: ProtocolIO, Ctx>(
coro: &Coro<Ctx>,
client: &C,
frames_file: Arc<dyn turso_core::File>,
frames_file: &Arc<dyn turso_core::File>,
revision: &DatabasePullRevision,
wal_pull_batch_size: u64,
) -> Result<DatabasePullRevision> {
// truncate file before pulling new data
let c = Completion::new_trunc(move |result| {
let Ok(rc) = result else {
return;
};
assert!(rc as usize == 0);
});
let c = frames_file.truncate(0, c)?;
while !c.is_completed() {
coro.yield_(ProtocolCommand::IO).await?;
}
match revision {
DatabasePullRevision::Legacy {
generation,
@@ -169,7 +204,7 @@ pub async fn wal_pull_to_file<C: ProtocolIO, Ctx>(
pub async fn wal_pull_to_file_v1<C: ProtocolIO, Ctx>(
coro: &Coro<Ctx>,
client: &C,
frames_file: Arc<dyn turso_core::File>,
frames_file: &Arc<dyn turso_core::File>,
revision: &str,
) -> Result<DatabasePullRevision> {
tracing::info!("wal_pull: revision={revision}");
@@ -263,7 +298,7 @@ pub async fn wal_pull_to_file_v1<C: ProtocolIO, Ctx>(
pub async fn wal_pull_to_file_legacy<C: ProtocolIO, Ctx>(
coro: &Coro<Ctx>,
client: &C,
frames_file: Arc<dyn turso_core::File>,
frames_file: &Arc<dyn turso_core::File>,
mut generation: u64,
mut start_frame: u64,
wal_pull_batch_size: u64,
@@ -284,9 +319,9 @@ pub async fn wal_pull_to_file_legacy<C: ProtocolIO, Ctx>(
let data = match result {
WalHttpPullResult::NeedCheckpoint(status) => {
assert!(status.status == "checkpoint_needed");
tracing::debug!("wal_pull: need checkpoint: status={status:?}");
tracing::info!("wal_pull: need checkpoint: status={status:?}");
if status.generation == generation && status.max_frame_no < start_frame {
tracing::debug!("wal_pull: end of history: status={:?}", status);
tracing::info!("wal_pull: end of history: status={:?}", status);
break DatabasePullRevision::Legacy {
generation: status.generation,
synced_frame_no: Some(status.max_frame_no),
@@ -655,7 +690,7 @@ pub async fn push_logical_changes<C: ProtocolIO, Ctx>(
client: &C,
source: &DatabaseTape,
client_id: &str,
opts: &DatabaseSyncEngineOpts<Ctx>,
opts: &DatabaseSyncEngineOpts,
) -> Result<(i64, i64)> {
tracing::info!("push_logical_changes: client_id={client_id}");
let source_conn = connect_untracked(source)?;
@@ -666,7 +701,6 @@ pub async fn push_logical_changes<C: ProtocolIO, Ctx>(
tracing::debug!("push_logical_changes: last_change_id={:?}", last_change_id);
let replay_opts = DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: None,
};

let generator = DatabaseReplayGenerator::new(source_conn, replay_opts);
@@ -697,15 +731,13 @@ pub async fn push_logical_changes<C: ProtocolIO, Ctx>(
];
let mut rows_changed = 0;
let mut changes = source.iterate_changes(iterate_opts)?;
let mut local_changes = Vec::new();
while let Some(operation) = changes.next(coro).await? {
match operation {
DatabaseTapeOperation::StmtReplay(_) => {
panic!("changes iterator must not use StmtReplay option")
}
DatabaseTapeOperation::RowChange(change) => {
assert!(
last_change_id.is_none() || last_change_id.unwrap() < change.change_id,
"change id must be strictly increasing: last_change_id={:?}, change.change_id={}",
last_change_id,
change.change_id
);
if change.table_name == TURSO_SYNC_TABLE_NAME {
continue;
}
@@ -713,38 +745,68 @@ pub async fn push_logical_changes<C: ProtocolIO, Ctx>(
if ignore.iter().any(|x| &change.table_name == x) {
continue;
}
local_changes.push(change);
}
DatabaseTapeOperation::Commit => continue,
}
}

let mut transformed = if opts.use_transform {
Some(apply_transformation(&coro, client, &local_changes, &generator).await?)
} else {
None
};

tracing::info!("local_changes: {:?}", local_changes);

for (i, change) in local_changes.into_iter().enumerate() {
let change_id = change.change_id;
let operation = if let Some(transformed) = &mut transformed {
match std::mem::replace(&mut transformed[i], DatabaseRowTransformResult::Skip) {
DatabaseRowTransformResult::Keep => DatabaseTapeOperation::RowChange(change),
DatabaseRowTransformResult::Skip => continue,
DatabaseRowTransformResult::Rewrite(replay) => {
DatabaseTapeOperation::StmtReplay(replay)
}
}
} else {
DatabaseTapeOperation::RowChange(change)
};
tracing::info!(
"change_id: {}, last_change_id: {:?}",
change_id,
last_change_id
);
assert!(
last_change_id.is_none() || last_change_id.unwrap() < change_id,
"change id must be strictly increasing: last_change_id={:?}, change.change_id={}",
last_change_id,
change_id
);
rows_changed += 1;
// we give user full control over CDC table - so let's not emit assert here for now
if last_change_id.is_some() && last_change_id.unwrap() + 1 != change.change_id {
if last_change_id.is_some() && last_change_id.unwrap() + 1 != change_id {
tracing::warn!(
"out of order change sequence: {} -> {}",
last_change_id.unwrap(),
change.change_id
change_id
);
}
last_change_id = Some(change.change_id);
let replay_info = generator.replay_info(coro, &change).await?;
if !replay_info.is_ddl_replay {
if let Some(transform) = &opts.transform {
let mutation = generator.create_mutation(&replay_info, &change)?;
if let Some(statement) = transform(&coro.ctx.borrow(), mutation)? {
tracing::info!(
"push_logical_changes: use mutation from custom transformer: sql={}, values={:?}",
statement.sql,
statement.values
);
sql_over_http_requests.push(Stmt {
sql: Some(statement.sql),
last_change_id = Some(change_id);
match operation {
DatabaseTapeOperation::Commit => {
panic!("Commit operation must not be emited at this stage")
|
||||
}
DatabaseTapeOperation::StmtReplay(replay) => sql_over_http_requests.push(Stmt {
sql: Some(replay.sql),
sql_id: None,
args: convert_to_args(statement.values),
args: convert_to_args(replay.values),
named_args: Vec::new(),
want_rows: Some(false),
replication_index: None,
});
continue;
}
}
}
}),
DatabaseTapeOperation::RowChange(change) => {
let replay_info = generator.replay_info(coro, &change).await?;
let change_type = (&change.change).into();
match change.change {
DatabaseTapeRowChangeType::Delete { before } => {
@@ -825,7 +887,9 @@ pub async fn push_logical_changes<C: ProtocolIO, Ctx>(
}
}
}
DatabaseTapeOperation::Commit => {
}
}

if rows_changed > 0 {
tracing::info!("prepare update stmt for turso_sync_last_change_id table with client_id={} and last_change_id={:?}", client_id, last_change_id);
// update turso_sync_last_change_id table with new value before commit
@@ -858,9 +922,6 @@ pub async fn push_logical_changes<C: ProtocolIO, Ctx>(
want_rows: Some(false),
replication_index: None,
});
}
}
}

tracing::trace!("hrana request: {:?}", sql_over_http_requests);
let replay_hrana_request = server_proto::PipelineReqBody {
@@ -876,6 +937,30 @@ pub async fn push_logical_changes<C: ProtocolIO, Ctx>(
Ok((source_pull_gen, last_change_id.unwrap_or(0)))
}

pub async fn apply_transformation<Ctx, P: ProtocolIO>(
coro: &Coro<Ctx>,
client: &P,
changes: &Vec<DatabaseTapeRowChange>,
generator: &DatabaseReplayGenerator,
) -> Result<Vec<DatabaseRowTransformResult>> {
let mut mutations = Vec::new();
for change in changes {
let replay_info = generator.replay_info(&coro, &change).await?;
mutations.push(generator.create_mutation(&replay_info, &change)?);
}
let completion = client.transform(mutations)?;
let transformed = wait_all_results(&coro, &completion).await?;
if transformed.len() != changes.len() {
return Err(Error::DatabaseSyncEngineError(format!(
"unexpected result from custom transformation: mismatch in shapes: {} != {}",
transformed.len(),
changes.len()
)));
}
tracing::info!("apply_transformation: got {:?}", transformed);
Ok(transformed)
}

pub async fn read_wal_salt<Ctx>(
coro: &Coro<Ctx>,
wal: &Arc<dyn turso_core::File>,
@@ -1102,7 +1187,7 @@ async fn sql_execute_http<C: ProtocolIO, Ctx>(
let error = format!("sql_execute_http: unexpected status code: {status}");
return Err(Error::DatabaseSyncEngineError(error));
}
let response = wait_full_body(coro, &completion).await?;
let response = wait_all_results(coro, &completion).await?;
let response: server_proto::PipelineRespBody = serde_json::from_slice(&response)?;
tracing::debug!("hrana response: {:?}", response);
let mut results = Vec::new();
@@ -1134,7 +1219,7 @@ async fn wal_pull_http<C: ProtocolIO, Ctx>(
generation: u64,
start_frame: u64,
end_frame: u64,
) -> Result<WalHttpPullResult<C::DataCompletion>> {
) -> Result<WalHttpPullResult<C::DataCompletionBytes>> {
let completion = client.http(
"GET",
&format!("/sync/{generation}/{start_frame}/{end_frame}"),
@@ -1143,7 +1228,7 @@ async fn wal_pull_http<C: ProtocolIO, Ctx>(
)?;
let status = wait_status(coro, &completion).await?;
if status == http::StatusCode::BAD_REQUEST {
let status_body = wait_full_body(coro, &completion).await?;
let status_body = wait_all_results(coro, &completion).await?;
let status: DbSyncStatus = serde_json::from_slice(&status_body)?;
if status.status == "checkpoint_needed" {
return Ok(WalHttpPullResult::NeedCheckpoint(status));
@@ -1178,7 +1263,7 @@ async fn wal_push_http<C: ProtocolIO, Ctx>(
&[],
)?;
let status = wait_status(coro, &completion).await?;
let status_body = wait_full_body(coro, &completion).await?;
let status_body = wait_all_results(coro, &completion).await?;
if status != http::StatusCode::OK {
let error = std::str::from_utf8(&status_body).ok().unwrap_or("");
return Err(Error::DatabaseSyncEngineError(format!(
@@ -1191,7 +1276,7 @@ async fn wal_push_http<C: ProtocolIO, Ctx>(
async fn db_info_http<C: ProtocolIO, Ctx>(coro: &Coro<Ctx>, client: &C) -> Result<DbSyncInfo> {
let completion = client.http("GET", "/info", None, &[])?;
let status = wait_status(coro, &completion).await?;
let status_body = wait_full_body(coro, &completion).await?;
let status_body = wait_all_results(coro, &completion).await?;
if status != http::StatusCode::OK {
return Err(Error::DatabaseSyncEngineError(format!(
"db_info go unexpected status: {status}"
|
||||
@@ -1204,7 +1289,7 @@ async fn db_bootstrap_http<C: ProtocolIO, Ctx>(
coro: &Coro<Ctx>,
client: &C,
generation: u64,
) -> Result<C::DataCompletion> {
) -> Result<C::DataCompletionBytes> {
let completion = client.http("GET", &format!("/export/{generation}"), None, &[])?;
let status = wait_status(coro, &completion).await?;
if status != http::StatusCode::OK.as_u16() {
@@ -1215,7 +1300,10 @@ async fn db_bootstrap_http<C: ProtocolIO, Ctx>(
Ok(completion)
}

pub async fn wait_status<Ctx>(coro: &Coro<Ctx>, completion: &impl DataCompletion) -> Result<u16> {
pub async fn wait_status<Ctx, T>(
coro: &Coro<Ctx>,
completion: &impl DataCompletion<T>,
) -> Result<u16> {
while completion.status()?.is_none() {
coro.yield_(ProtocolCommand::IO).await?;
}
@@ -1244,7 +1332,7 @@ pub fn read_varint(buf: &[u8]) -> Result<Option<(usize, usize)>> {

pub async fn wait_proto_message<Ctx, T: prost::Message + Default>(
coro: &Coro<Ctx>,
completion: &impl DataCompletion,
completion: &impl DataCompletion<u8>,
bytes: &mut BytesMut,
) -> Result<Option<T>> {
let start_time = std::time::Instant::now();
@@ -1281,21 +1369,21 @@ pub async fn wait_proto_message<Ctx, T: prost::Message + Default>(
}
}

pub async fn wait_full_body<Ctx>(
pub async fn wait_all_results<Ctx, T: Clone>(
coro: &Coro<Ctx>,
completion: &impl DataCompletion,
) -> Result<Vec<u8>> {
let mut bytes = Vec::new();
completion: &impl DataCompletion<T>,
) -> Result<Vec<T>> {
let mut results = Vec::new();
loop {
while let Some(poll) = completion.poll_data()? {
bytes.extend_from_slice(poll.data());
results.extend_from_slice(poll.data());
}
if completion.is_done()? {
break;
}
coro.yield_(ProtocolCommand::IO).await?;
}
Ok(bytes)
Ok(results)
}

#[cfg(test)]
@@ -1315,7 +1403,7 @@ mod tests {

struct TestPollResult(Vec<u8>);

impl DataPollResult for TestPollResult {
impl DataPollResult<u8> for TestPollResult {
fn data(&self) -> &[u8] {
&self.0
}
@@ -1326,9 +1414,8 @@ mod tests {
chunk: usize,
}

impl DataCompletion for TestCompletion {
impl DataCompletion<u8> for TestCompletion {
type DataPollResult = TestPollResult;

fn status(&self) -> crate::Result<Option<u16>> {
Ok(Some(200))
}

@@ -11,7 +11,7 @@ use crate::{
errors::Error,
types::{
Coro, DatabaseChange, DatabaseTapeOperation, DatabaseTapeRowChange,
DatabaseTapeRowChangeType, ProtocolCommand, Transform,
DatabaseTapeRowChangeType, ProtocolCommand,
},
wal_session::WalSession,
Result,
@@ -169,8 +169,8 @@ impl DatabaseTape {
pub async fn start_replay_session<Ctx>(
&self,
coro: &Coro<Ctx>,
opts: DatabaseReplaySessionOpts<Ctx>,
) -> Result<DatabaseReplaySession<Ctx>> {
opts: DatabaseReplaySessionOpts,
) -> Result<DatabaseReplaySession> {
tracing::debug!("opening replay session");
let conn = self.connect(coro).await?;
conn.execute("BEGIN IMMEDIATE")?;
@@ -431,16 +431,14 @@ impl DatabaseChangesIterator {
}

#[derive(Clone)]
pub struct DatabaseReplaySessionOpts<Ctx = ()> {
pub struct DatabaseReplaySessionOpts {
pub use_implicit_rowid: bool,
pub transform: Option<Transform<Ctx>>,
}

impl<Ctx> std::fmt::Debug for DatabaseReplaySessionOpts<Ctx> {
impl std::fmt::Debug for DatabaseReplaySessionOpts {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("DatabaseReplaySessionOpts")
.field("use_implicit_rowid", &self.use_implicit_rowid)
.field("transform_mutation.is_some()", &self.transform.is_some())
.finish()
}
}
@@ -450,13 +448,13 @@ pub(crate) struct CachedStmt {
info: ReplayInfo,
}

pub struct DatabaseReplaySession<Ctx = ()> {
pub struct DatabaseReplaySession {
pub(crate) conn: Arc<turso_core::Connection>,
pub(crate) cached_delete_stmt: HashMap<String, CachedStmt>,
pub(crate) cached_insert_stmt: HashMap<(String, usize), CachedStmt>,
pub(crate) cached_update_stmt: HashMap<(String, Vec<bool>), CachedStmt>,
pub(crate) in_txn: bool,
pub(crate) generator: DatabaseReplayGenerator<Ctx>,
pub(crate) generator: DatabaseReplayGenerator,
}

async fn replay_stmt<Ctx>(
@@ -472,11 +470,11 @@ async fn replay_stmt<Ctx>(
Ok(())
}

impl<Ctx> DatabaseReplaySession<Ctx> {
impl DatabaseReplaySession {
pub fn conn(&self) -> Arc<turso_core::Connection> {
self.conn.clone()
}
pub async fn replay(
pub async fn replay<Ctx>(
&mut self,
coro: &Coro<Ctx>,
operation: DatabaseTapeOperation,
@@ -489,6 +487,11 @@ impl<Ctx> DatabaseReplaySession<Ctx> {
self.in_txn = false;
}
}
DatabaseTapeOperation::StmtReplay(replay) => {
let mut stmt = self.conn.prepare(&replay.sql)?;
replay_stmt(coro, &mut stmt, replay.values).await?;
return Ok(());
}
DatabaseTapeOperation::RowChange(change) => {
if !self.in_txn {
tracing::trace!("replay: start txn for replaying changes");
@@ -502,21 +505,6 @@ impl<Ctx> DatabaseReplaySession<Ctx> {
let replay_info = self.generator.replay_info(coro, &change).await?;
self.conn.execute(replay_info.query.as_str())?;
} else {
if let Some(transform) = &self.generator.opts.transform {
let replay_info = self.generator.replay_info(coro, &change).await?;
let mutation = self.generator.create_mutation(&replay_info, &change)?;
let statement = transform(&coro.ctx.borrow(), mutation)?;
if let Some(statement) = statement {
tracing::info!(
"replay: use mutation from custom transformer: sql={}, values={:?}",
statement.sql,
statement.values
);
let mut stmt = self.conn.prepare(&statement.sql)?;
replay_stmt(coro, &mut stmt, statement.values).await?;
return Ok(());
}
}
match change.change {
DatabaseTapeRowChangeType::Delete { before } => {
let key = self.populate_delete_stmt(coro, table).await?;
@@ -625,7 +613,7 @@ impl<Ctx> DatabaseReplaySession<Ctx> {
}
Ok(())
}
async fn populate_delete_stmt<'a>(
async fn populate_delete_stmt<'a, Ctx>(
&mut self,
coro: &Coro<Ctx>,
table: &'a str,
@@ -640,7 +628,7 @@ impl<Ctx> DatabaseReplaySession<Ctx> {
.insert(table.to_string(), CachedStmt { stmt, info });
Ok(table)
}
async fn populate_insert_stmt(
async fn populate_insert_stmt<Ctx>(
&mut self,
coro: &Coro<Ctx>,
table: &str,
@@ -661,7 +649,7 @@ impl<Ctx> DatabaseReplaySession<Ctx> {
.insert(key.clone(), CachedStmt { stmt, info });
Ok(key)
}
async fn populate_update_stmt(
async fn populate_update_stmt<Ctx>(
&mut self,
coro: &Coro<Ctx>,
table: &str,
@@ -822,7 +810,6 @@ mod tests {
{
let opts = DatabaseReplaySessionOpts {
use_implicit_rowid: true,
transform: None,
};
let mut session = db2.start_replay_session(&coro, opts).await.unwrap();
let opts = Default::default();
@@ -902,7 +889,6 @@ mod tests {
{
let opts = DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: None,
};
let mut session = db2.start_replay_session(&coro, opts).await.unwrap();
let opts = Default::default();
@@ -973,7 +959,6 @@ mod tests {
{
let opts = DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: None,
};
let mut session = db2.start_replay_session(&coro, opts).await.unwrap();
let opts = Default::default();
@@ -1048,7 +1033,6 @@ mod tests {
{
let opts = DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: None,
};
let mut session = db3.start_replay_session(&coro, opts).await.unwrap();

@@ -1166,7 +1150,6 @@ mod tests {
{
let opts = DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: None,
};
let mut session = db2.start_replay_session(&coro, opts).await.unwrap();

@@ -1252,7 +1235,6 @@ mod tests {
{
let opts = DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: None,
};
let mut session = db2.start_replay_session(&coro, opts).await.unwrap();

@@ -1349,7 +1331,6 @@ mod tests {
{
let opts = DatabaseReplaySessionOpts {
use_implicit_rowid: false,
transform: None,
};
let mut session = db3.start_replay_session(&coro, opts).await.unwrap();


@@ -1,25 +1,33 @@
use crate::Result;
use crate::{
types::{DatabaseRowMutation, DatabaseRowTransformResult},
Result,
};

pub trait DataPollResult {
fn data(&self) -> &[u8];
pub trait DataPollResult<T> {
fn data(&self) -> &[T];
}

pub trait DataCompletion {
type DataPollResult: DataPollResult;
pub trait DataCompletion<T> {
type DataPollResult: DataPollResult<T>;
fn status(&self) -> Result<Option<u16>>;
fn poll_data(&self) -> Result<Option<Self::DataPollResult>>;
fn is_done(&self) -> Result<bool>;
}

pub trait ProtocolIO {
type DataCompletion: DataCompletion;
fn full_read(&self, path: &str) -> Result<Self::DataCompletion>;
fn full_write(&self, path: &str, content: Vec<u8>) -> Result<Self::DataCompletion>;
type DataCompletionBytes: DataCompletion<u8>;
type DataCompletionTransform: DataCompletion<DatabaseRowTransformResult>;
fn full_read(&self, path: &str) -> Result<Self::DataCompletionBytes>;
fn full_write(&self, path: &str, content: Vec<u8>) -> Result<Self::DataCompletionBytes>;
fn transform(
&self,
mutations: Vec<DatabaseRowMutation>,
) -> Result<Self::DataCompletionTransform>;
fn http(
&self,
method: &str,
path: &str,
body: Option<Vec<u8>>,
headers: &[(&str, &str)],
) -> Result<Self::DataCompletion>;
) -> Result<Self::DataCompletionBytes>;
}

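DataCompletion is now generic over its item type, so the same completion machinery can yield raw bytes for HTTP bodies (DataCompletion<u8>) and ready-made verdicts for the new transform hook (DataCompletion<DatabaseRowTransformResult>). A minimal in-memory implementor sketch; the trait shapes mirror the definitions above, but error plumbing is simplified to a String error (an assumption of this sketch, not the crate's real Result):

// Minimal in-memory DataCompletion<T> implementor sketch.
type Result<T> = std::result::Result<T, String>;

trait DataPollResult<T> {
    fn data(&self) -> &[T];
}

trait DataCompletion<T> {
    type DataPollResult: DataPollResult<T>;
    fn status(&self) -> Result<Option<u16>>;
    fn poll_data(&self) -> Result<Option<Self::DataPollResult>>;
    fn is_done(&self) -> Result<bool>;
}

struct MemPoll<T>(Vec<T>);
impl<T> DataPollResult<T> for MemPoll<T> {
    fn data(&self) -> &[T] { &self.0 }
}

// Hands the whole buffer out on the first poll, then reports done.
struct MemCompletion<T: Clone> {
    items: std::cell::RefCell<Option<Vec<T>>>,
}

impl<T: Clone> DataCompletion<T> for MemCompletion<T> {
    type DataPollResult = MemPoll<T>;
    fn status(&self) -> Result<Option<u16>> { Ok(Some(200)) }
    fn poll_data(&self) -> Result<Option<MemPoll<T>>> {
        Ok(self.items.borrow_mut().take().map(MemPoll))
    }
    fn is_done(&self) -> Result<bool> { Ok(self.items.borrow().is_none()) }
}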
@@ -2,10 +2,7 @@ use std::{cell::RefCell, collections::HashMap, sync::Arc};

use serde::{Deserialize, Serialize};

use crate::{errors::Error, Result};

pub type Transform<Ctx> =
Arc<dyn Fn(&Ctx, DatabaseRowMutation) -> Result<Option<DatabaseRowStatement>> + 'static>;
use crate::{database_sync_operations::MutexSlot, errors::Error, Result};

pub struct Coro<Ctx> {
pub ctx: RefCell<Ctx>,
@@ -48,15 +45,28 @@ pub struct DbSyncStatus {
pub max_frame_no: u64,
}

#[derive(Debug)]
pub struct DbChangesStatus {
pub time: turso_core::Instant,
pub revision: DatabasePullRevision,
pub file_path: String,
pub file_slot: Option<MutexSlot<Arc<dyn turso_core::File>>>,
}

impl std::fmt::Debug for DbChangesStatus {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("DbChangesStatus")
.field("time", &self.time)
.field("revision", &self.revision)
.field("file_slot.is_some()", &self.file_slot.is_some())
.finish()
}
}

pub struct SyncEngineStats {
pub cdc_operations: i64,
pub wal_size: i64,
pub main_wal_size: u64,
pub revert_wal_size: u64,
pub last_pull_unix_time: i64,
pub last_push_unix_time: Option<i64>,
}

#[derive(Debug, Clone, Copy, PartialEq)]
@@ -66,8 +76,11 @@ pub enum DatabaseChangeType {
Insert,
}

pub const DATABASE_METADATA_VERSION: &str = "v1";

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
pub struct DatabaseMetadata {
pub version: String,
/// Unique identifier of the client - generated on sync startup
pub client_unique_id: String,
/// Latest generation from remote which was pulled locally to the Synced DB
@@ -75,6 +88,10 @@ pub struct DatabaseMetadata {
/// pair of frame_no for Draft and Synced DB such that content of the database file up to these frames is identical
pub revert_since_wal_salt: Option<Vec<u32>>,
pub revert_since_wal_watermark: u64,
/// Unix time of last successful pull
pub last_pull_unix_time: i64,
/// Unix time of last successful push
pub last_push_unix_time: Option<i64>,
pub last_pushed_pull_gen_hint: i64,
pub last_pushed_change_id_hint: i64,
}
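DatabaseMetadata gains the last_pull_unix_time/last_push_unix_time fields and is persisted through update_meta via meta.dump(). A trimmed round-trip sketch, assuming a JSON encoding via serde_json (the actual encoding behind dump() is not shown in this diff, so this is illustrative only):

// Trimmed-down metadata round-trip; field set abbreviated, serde_json
// stands in for whatever dump() uses internally.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Meta {
    version: String,
    client_unique_id: String,
    last_pull_unix_time: i64,
    last_push_unix_time: Option<i64>,
}

fn main() {
    let meta = Meta {
        version: "v1".into(),
        client_unique_id: "client-123".into(),
        last_pull_unix_time: 1_700_000_000,
        last_push_unix_time: None,
    };
    let bytes = serde_json::to_vec(&meta).unwrap();
    let back: Meta = serde_json::from_slice(&bytes).unwrap();
    assert_eq!(meta, back);
}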
@@ -269,11 +286,20 @@ pub struct DatabaseRowMutation {
pub updates: Option<HashMap<String, turso_core::Value>>,
}

pub struct DatabaseRowStatement {
#[derive(Debug, Clone)]
pub struct DatabaseStatementReplay {
pub sql: String,
pub values: Vec<turso_core::Value>,
}

#[derive(Debug, Clone)]
pub enum DatabaseRowTransformResult {
Keep,
Skip,
Rewrite(DatabaseStatementReplay),
}
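DatabaseRowTransformResult is the three-way verdict consumed by both push_logical_changes and apply_changes_internal above: Keep forwards the original row change, Skip drops it, and Rewrite substitutes a prepared statement replay. A hedged sketch of a verdict function; the table names and the String-for-turso_core::Value substitution are hypothetical, for illustration only:

// Illustrative only: String stands in for turso_core::Value, and the
// "local_cache"/"audit_log" table names are made up for this sketch.
#[derive(Debug, Clone)]
struct StatementReplay { sql: String, values: Vec<String> }

#[derive(Debug, Clone)]
enum TransformResult { Keep, Skip, Rewrite(StatementReplay) }

fn transform_row(table: &str, row_id: i64) -> TransformResult {
    match table {
        // drop local-only rows entirely
        "local_cache" => TransformResult::Skip,
        // rewrite audit rows into a redacted insert
        "audit_log" => TransformResult::Rewrite(StatementReplay {
            sql: "INSERT INTO audit_log(id, payload) VALUES (?, 'redacted')".into(),
            values: vec![row_id.to_string()],
        }),
        // forward everything else unchanged
        _ => TransformResult::Keep,
    }
}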

#[derive(Clone)]
pub enum DatabaseTapeRowChangeType {
Delete {
before: Vec<turso_core::Value>,
@@ -304,12 +330,13 @@ impl From<&DatabaseTapeRowChangeType> for DatabaseChangeType {
/// by consuming events from [crate::database_tape::DatabaseChangesIterator]
#[derive(Debug)]
pub enum DatabaseTapeOperation {
StmtReplay(DatabaseStatementReplay),
RowChange(DatabaseTapeRowChange),
Commit,
}

/// [DatabaseTapeRowChange] is the specific operation over a single row which can be performed on the database
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct DatabaseTapeRowChange {
pub change_id: i64,
pub change_time: u64,

20
sync/javascript/.github/renovate.json
vendored
@@ -1,20 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["config:base", "group:allNonMajor", ":preserveSemverRanges", ":disablePeerDependencies"],
"labels": ["dependencies"],
"packageRules": [
{
"matchPackageNames": ["@napi/cli", "napi", "napi-build", "napi-derive"],
"addLabels": ["napi-rs"],
"groupName": "napi-rs"
},
{
"matchPackagePatterns": ["^eslint", "^@typescript-eslint"],
"groupName": "linter"
}
],
"commitMessagePrefix": "chore: ",
"commitMessageAction": "bump up",
"commitMessageTopic": "{{depName}} version",
"ignoreDeps": []
}
138
sync/javascript/.gitignore
vendored
@@ -1,138 +0,0 @@

# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node

### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# Next.js build output
.next

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# End of https://www.toptal.com/developers/gitignore/api/node


#Added by cargo

/target
Cargo.lock

*.node
*.wasm

.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
/npm

*-draft
*-synced
*-info

package.native.json
942
sync/javascript/.yarn/releases/yarn-4.9.2.cjs
vendored
File diff suppressed because one or more lines are too long
@@ -1,5 +0,0 @@
nodeLinker: node-modules

npmAuditRegistry: "https://registry.npmjs.org"

yarnPath: .yarn/releases/yarn-4.9.2.cjs
@@ -1,20 +0,0 @@
pack-native:
npm publish --dry-run && npm pack
pack-browser:
cp package.json package.native.json
cp package.browser.json package.json
npm publish --dry-run && npm pack; cp package.native.json package.json

publish-native:
npm publish --access public
publish-browser:
cp package.json package.native.json
cp package.browser.json package.json
npm publish --access public; cp package.native.json package.json

publish-native-next:
npm publish --tag next --access public
publish-browser-next:
cp package.json package.native.json
cp package.browser.json package.json
npm publish --tag next --access public; cp package.native.json package.json
@@ -1 +0,0 @@
export * from '@tursodatabase/sync-wasm32-wasi'
@@ -1,33 +0,0 @@
import { connect } from '@tursodatabase/sync';

const db = await connect({
path: 'local.db',
url: process.env.TURSO_URL,
authToken: process.env.TURSO_AUTH_TOKEN,
clientName: 'turso-sync-example'
});

await db.sync();

console.info("database initialized and ready to accept writes")

{
console.info("data from remote")
let stmt = await db.prepare('SELECT * FROM users');
console.info(await stmt.all());
}


for (let i = 0; i < 2; i++) {
let id = Math.ceil(Math.random() * 100000);
await db.exec(`INSERT INTO users VALUES (${id}, 'random-name-${id}')`);
}

{
console.info("data after local insert")
let stmt = await db.prepare('SELECT * FROM users');
console.info(await stmt.all());
}

console.info("sync changes with the remote")
await db.sync();
39
sync/javascript/examples/cloud/package-lock.json
generated
@@ -1,39 +0,0 @@
{
"name": "cloud",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "cloud",
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"@tursodatabase/sync": "../.."
}
},
"../..": {
"name": "@tursodatabase/sync",
"version": "0.1.4-pre.2",
"license": "MIT",
"dependencies": {
"@tursodatabase/turso": "file:../../bindings/javascript"
},
"devDependencies": {
"@napi-rs/cli": "^3.0.4",
"@napi-rs/wasm-runtime": "^1.0.1",
"@types/node": "^24.2.0",
"ava": "^6.0.1",
"tsc": "^2.0.4",
"typescript": "^5.9.2"
},
"engines": {
"node": ">= 10"
}
},
"node_modules/@tursodatabase/sync": {
"resolved": "../..",
"link": true
}
}
}
@@ -1,15 +0,0 @@
{
"name": "cloud",
"version": "1.0.0",
"main": "index.js",
"type": "module",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"description": "",
"dependencies": {
"@tursodatabase/sync": "../.."
}
}
@@ -1,406 +0,0 @@
|
||||
// prettier-ignore
|
||||
/* eslint-disable */
|
||||
// @ts-nocheck
|
||||
/* auto-generated by NAPI-RS */
|
||||
|
||||
import { createRequire } from 'node:module'
|
||||
const require = createRequire(import.meta.url)
|
||||
const __dirname = new URL('.', import.meta.url).pathname
|
||||
|
||||
const { readFileSync } = require('node:fs')
|
||||
let nativeBinding = null
|
||||
const loadErrors = []
|
||||
|
||||
const isMusl = () => {
|
||||
let musl = false
|
||||
if (process.platform === 'linux') {
|
||||
musl = isMuslFromFilesystem()
|
||||
if (musl === null) {
|
||||
musl = isMuslFromReport()
|
||||
}
|
||||
if (musl === null) {
|
||||
musl = isMuslFromChildProcess()
|
||||
}
|
||||
}
|
||||
return musl
|
||||
}
|
||||
|
||||
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')
|
||||
|
||||
const isMuslFromFilesystem = () => {
|
||||
try {
|
||||
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
const isMuslFromReport = () => {
|
||||
let report = null
|
||||
if (typeof process.report?.getReport === 'function') {
|
||||
process.report.excludeNetwork = true
|
||||
report = process.report.getReport()
|
||||
}
|
||||
if (!report) {
|
||||
return null
|
||||
}
|
||||
if (report.header && report.header.glibcVersionRuntime) {
|
||||
return false
|
||||
}
|
||||
if (Array.isArray(report.sharedObjects)) {
|
||||
if (report.sharedObjects.some(isFileMusl)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
const isMuslFromChildProcess = () => {
|
||||
try {
|
||||
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
|
||||
} catch (e) {
|
||||
// If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
function requireNative() {
|
||||
if (process.env.NAPI_RS_NATIVE_LIBRARY_PATH) {
|
||||
try {
|
||||
nativeBinding = require(process.env.NAPI_RS_NATIVE_LIBRARY_PATH);
|
||||
} catch (err) {
|
||||
loadErrors.push(err)
|
||||
}
|
||||
} else if (process.platform === 'android') {
|
||||
if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./turso-sync-js.android-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-android-arm64')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm') {
|
||||
try {
|
||||
return require('./turso-sync-js.android-arm-eabi.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-android-arm-eabi')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'win32') {
|
||||
if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./turso-sync-js.win32-x64-msvc.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-win32-x64-msvc')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'ia32') {
|
||||
try {
|
||||
return require('./turso-sync-js.win32-ia32-msvc.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-win32-ia32-msvc')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./turso-sync-js.win32-arm64-msvc.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-win32-arm64-msvc')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'darwin') {
|
||||
try {
|
||||
return require('./turso-sync-js.darwin-universal.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-darwin-universal')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./turso-sync-js.darwin-x64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-darwin-x64')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./turso-sync-js.darwin-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-darwin-arm64')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'freebsd') {
|
||||
if (process.arch === 'x64') {
|
||||
try {
|
||||
return require('./turso-sync-js.freebsd-x64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-freebsd-x64')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
try {
|
||||
return require('./turso-sync-js.freebsd-arm64.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-freebsd-arm64')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
|
||||
}
|
||||
} else if (process.platform === 'linux') {
|
||||
if (process.arch === 'x64') {
|
||||
if (isMusl()) {
|
||||
try {
|
||||
return require('./turso-sync-js.linux-x64-musl.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-linux-x64-musl')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
return require('./turso-sync-js.linux-x64-gnu.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
return require('@tursodatabase/sync-linux-x64-gnu')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
}
|
||||
} else if (process.arch === 'arm64') {
|
||||
if (isMusl()) {
|
||||
try {
|
||||
return require('./turso-sync-js.linux-arm64-musl.node')
|
||||
} catch (e) {
|
||||
loadErrors.push(e)
|
||||
}
|
||||
try {
|
||||
          return require('@tursodatabase/sync-linux-arm64-musl')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso-sync-js.linux-arm64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/sync-linux-arm64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'arm') {
      if (isMusl()) {
        try {
          return require('./turso-sync-js.linux-arm-musleabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/sync-linux-arm-musleabihf')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso-sync-js.linux-arm-gnueabihf.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/sync-linux-arm-gnueabihf')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'riscv64') {
      if (isMusl()) {
        try {
          return require('./turso-sync-js.linux-riscv64-musl.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/sync-linux-riscv64-musl')
        } catch (e) {
          loadErrors.push(e)
        }
      } else {
        try {
          return require('./turso-sync-js.linux-riscv64-gnu.node')
        } catch (e) {
          loadErrors.push(e)
        }
        try {
          return require('@tursodatabase/sync-linux-riscv64-gnu')
        } catch (e) {
          loadErrors.push(e)
        }
      }
    } else if (process.arch === 'ppc64') {
      try {
        return require('./turso-sync-js.linux-ppc64-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/sync-linux-ppc64-gnu')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 's390x') {
      try {
        return require('./turso-sync-js.linux-s390x-gnu.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/sync-linux-s390x-gnu')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
    }
  } else if (process.platform === 'openharmony') {
    if (process.arch === 'arm64') {
      try {
        return require('./turso-sync-js.linux-arm64-ohos.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/sync-linux-arm64-ohos')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'x64') {
      try {
        return require('./turso-sync-js.linux-x64-ohos.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/sync-linux-x64-ohos')
      } catch (e) {
        loadErrors.push(e)
      }
    } else if (process.arch === 'arm') {
      try {
        return require('./turso-sync-js.linux-arm-ohos.node')
      } catch (e) {
        loadErrors.push(e)
      }
      try {
        return require('@tursodatabase/sync-linux-arm-ohos')
      } catch (e) {
        loadErrors.push(e)
      }
    } else {
      loadErrors.push(new Error(`Unsupported architecture on OpenHarmony: ${process.arch}`))
    }
  } else {
    loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
  }
}
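
// Note (editorial, not part of the generated file): requireNative() walks a
// platform -> arch -> libc decision tree; each leaf first tries the local
// prebuilt .node file, then the matching optional npm package, pushing every
// failure onto loadErrors so the final error can report all causes. A minimal
// sketch of the same cascade, with the helper name `tryRequire` being
// illustrative only:
//
//   function tryRequire(loadErrors, ...candidates) {
//     for (const id of candidates) {
//       try { return require(id) } catch (e) { loadErrors.push(e) }
//     }
//   }
//   // tryRequire(loadErrors, './turso-sync-js.linux-arm64-gnu.node',
//   //            '@tursodatabase/sync-linux-arm64-gnu')
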
nativeBinding = requireNative()

if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
  try {
    nativeBinding = require('./turso-sync-js.wasi.cjs')
  } catch (err) {
    if (process.env.NAPI_RS_FORCE_WASI) {
      loadErrors.push(err)
    }
  }
  if (!nativeBinding) {
    try {
      nativeBinding = require('@tursodatabase/sync-wasm32-wasi')
    } catch (err) {
      if (process.env.NAPI_RS_FORCE_WASI) {
        loadErrors.push(err)
      }
    }
  }
}

if (!nativeBinding) {
  if (loadErrors.length > 0) {
    throw new Error(
      `Cannot find native binding. ` +
      `npm has a bug related to optional dependencies (https://github.com/npm/cli/issues/4828). ` +
      'Please try `npm i` again after removing both package-lock.json and node_modules directory.',
      { cause: loadErrors }
    )
  }
  throw new Error(`Failed to load native binding`)
}

const { Database, Statement, GeneratorHolder, JsDataCompletion, JsDataPollResult, JsProtocolIo, JsProtocolRequestData, SyncEngine, DatabaseChangeTypeJs, SyncEngineProtocolVersion } = nativeBinding
export { Database }
export { Statement }
export { GeneratorHolder }
export { JsDataCompletion }
export { JsDataPollResult }
export { JsProtocolIo }
export { JsProtocolRequestData }
export { SyncEngine }
export { DatabaseChangeTypeJs }
export { SyncEngineProtocolVersion }
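
// Note (editorial): the WASI fallback above runs when no native binary could be
// loaded, or unconditionally when NAPI_RS_FORCE_WASI is set, e.g. (illustrative):
//
//   NAPI_RS_FORCE_WASI=1 node app.mjs
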
@@ -1,57 +0,0 @@
{
  "name": "@tursodatabase/sync-browser",
  "version": "0.1.5-pre.2",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "description": "Sync engine for the Turso database library specifically for browser/web environment",
  "module": "./dist/sync_engine.js",
  "main": "./dist/sync_engine.js",
  "type": "module",
  "exports": "./dist/sync_engine.js",
  "files": [
    "browser.js",
    "dist/**"
  ],
  "types": "./dist/sync_engine.d.ts",
  "napi": {
    "binaryName": "turso-sync-js",
    "targets": [
      "wasm32-wasip1-threads"
    ]
  },
  "license": "MIT",
  "devDependencies": {
    "@napi-rs/cli": "^3.0.4",
    "@napi-rs/wasm-runtime": "^1.0.1",
    "@types/node": "^24.2.0",
    "ava": "^6.0.1",
    "typescript": "^5.9.2"
  },
  "ava": {
    "timeout": "3m"
  },
  "engines": {
    "node": ">= 10"
  },
  "scripts": {
    "artifacts": "napi artifacts",
    "build": "npm exec tsc && napi build --platform --release --esm",
    "build:debug": "npm exec tsc && napi build --platform",
    "prepublishOnly": "npm exec tsc && napi prepublish -t npm --skip-optional-publish",
    "test": "true",
    "universal": "napi universalize",
    "version": "napi version"
  },
  "packageManager": "yarn@4.9.2",
  "imports": {
    "#entry-point": {
      "types": "./index.d.ts",
      "browser": "./browser.js"
    }
  },
  "dependencies": {
    "@tursodatabase/database": "~0.1.4-pre.5"
  }
}
@@ -1,63 +0,0 @@
{
  "name": "@tursodatabase/sync",
  "version": "0.1.5",
  "repository": {
    "type": "git",
    "url": "https://github.com/tursodatabase/turso"
  },
  "description": "Sync engine for the Turso database library",
  "module": "./dist/sync_engine.js",
  "main": "./dist/sync_engine.js",
  "type": "module",
  "exports": "./dist/sync_engine.js",
  "files": [
    "browser.js",
    "index.js",
    "dist/**"
  ],
  "types": "./dist/sync_engine.d.ts",
  "napi": {
    "binaryName": "turso-sync-js",
    "targets": [
      "x86_64-unknown-linux-gnu",
      "x86_64-pc-windows-msvc",
      "universal-apple-darwin",
      "aarch64-unknown-linux-gnu",
      "wasm32-wasip1-threads"
    ]
  },
  "license": "MIT",
  "devDependencies": {
    "@napi-rs/cli": "^3.0.4",
    "@napi-rs/wasm-runtime": "^1.0.1",
    "@types/node": "^24.2.0",
    "ava": "^6.0.1",
    "typescript": "^5.9.2"
  },
  "ava": {
    "timeout": "3m"
  },
  "engines": {
    "node": ">= 10"
  },
  "scripts": {
    "artifacts": "napi artifacts",
    "build": "npm exec tsc && napi build --platform --release --esm",
    "build:debug": "npm exec tsc && napi build --platform",
    "prepublishOnly": "npm exec tsc && napi prepublish -t npm",
    "test": "true",
    "universal": "napi universalize",
    "version": "napi version"
  },
  "packageManager": "yarn@4.9.2",
  "imports": {
    "#entry-point": {
      "types": "./index.d.ts",
      "browser": "./browser.js",
      "node": "./index.js"
    }
  },
  "dependencies": {
    "@tursodatabase/database": "~0.1.4-pre.5"
  }
}
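
// Note (editorial): in both manifests above, the `#entry-point` subpath import
// selects the runtime entry per environment, so the shared TypeScript source can
// write (illustrative consumer code):
//
//   import { SyncEngine } from '#entry-point'
//   // bundlers honoring the "browser" condition resolve ./browser.js;
//   // Node (the "node" condition, @tursodatabase/sync only) resolves ./index.js.
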
@@ -1,59 +0,0 @@
use napi::Env;
use napi_derive::napi;
use std::{
    future::Future,
    sync::{Arc, Mutex},
};

use turso_sync_engine::types::ProtocolCommand;

pub const GENERATOR_RESUME_IO: u32 = 0;
pub const GENERATOR_RESUME_DONE: u32 = 1;

pub trait Generator {
    fn resume(&mut self, env: Env, result: Option<String>) -> napi::Result<u32>;
}

impl<F: Future<Output = turso_sync_engine::Result<()>>> Generator
    for genawaiter::sync::Gen<ProtocolCommand, turso_sync_engine::Result<Env>, F>
{
    fn resume(&mut self, env: Env, error: Option<String>) -> napi::Result<u32> {
        let result = match error {
            Some(err) => Err(turso_sync_engine::errors::Error::DatabaseSyncEngineError(
                format!("JsProtocolIo error: {err}"),
            )),
            None => Ok(env),
        };
        match self.resume_with(result) {
            genawaiter::GeneratorState::Yielded(ProtocolCommand::IO) => Ok(GENERATOR_RESUME_IO),
            genawaiter::GeneratorState::Complete(Ok(())) => Ok(GENERATOR_RESUME_DONE),
            genawaiter::GeneratorState::Complete(Err(err)) => Err(napi::Error::new(
                napi::Status::GenericFailure,
                format!("sync engine operation failed: {err}"),
            )),
        }
    }
}

#[napi(discriminant = "type")]
pub enum GeneratorResponse {
    SyncEngineStats { operations: i64, wal: i64 },
}

#[napi]
pub struct GeneratorHolder {
    pub(crate) inner: Box<Mutex<dyn Generator>>,
    pub(crate) response: Arc<Mutex<Option<GeneratorResponse>>>,
}

#[napi]
impl GeneratorHolder {
    #[napi]
    pub fn resume(&self, env: Env, error: Option<String>) -> napi::Result<u32> {
        self.inner.lock().unwrap().resume(env, error)
    }
    #[napi]
    pub fn take(&self) -> Option<GeneratorResponse> {
        self.response.lock().unwrap().take()
    }
}
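
// Note (editorial): resume() maps the genawaiter coroutine onto two integer
// codes polled from JavaScript: GENERATOR_RESUME_IO (0) means "the generator
// yielded ProtocolCommand::IO, service the pending I/O requests and resume
// again"; GENERATOR_RESUME_DONE (1) means the sync operation completed. Any
// error is surfaced to JavaScript as a napi GenericFailure.
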
@@ -1,174 +0,0 @@
"use strict";

import { SyncEngine, DatabaseRowMutationJs, DatabaseRowStatementJs } from '#entry-point';
import { Database } from '@tursodatabase/database';

const GENERATOR_RESUME_IO = 0;
const GENERATOR_RESUME_DONE = 1;

function trackPromise<T>(p: Promise<T>): { promise: Promise<T>, finished: boolean } {
  const status = { promise: p, finished: false };
  status.promise = p.finally(() => { status.finished = true; });
  return status;
}

function timeoutMs(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms))
}

async function read(opts, path: string): Promise<Buffer | Uint8Array | null> {
  if (opts.isMemory) {
    return opts.value;
  }
  if (typeof window === 'undefined') {
    const { promises } = await import('node:fs');
    try {
      return await promises.readFile(path);
    } catch (error) {
      if (error.code === 'ENOENT') {
        return null;
      }
      throw error;
    }
  } else {
    const data = localStorage.getItem(path);
    if (data != null) {
      return new TextEncoder().encode(data);
    } else {
      return null;
    }
  }
}

async function write(opts, path: string, content: number[]): Promise<void> {
  if (opts.isMemory) {
    opts.value = content;
    return;
  }
  const data = new Uint8Array(content);
  if (typeof window === 'undefined') {
    const { promises } = await import('node:fs');
    // write to a unique temporary file first, then rename over the target
    const unix = Math.floor(Date.now() / 1000);
    const nonce = Math.floor(Math.random() * 1000000000);
    const tmp = `${path}.tmp.${unix}.${nonce}`;
    await promises.writeFile(tmp, data);
    await promises.rename(tmp, path);
  } else {
    localStorage.setItem(path, new TextDecoder().decode(data));
  }
}

async function process(opts, request) {
  const requestType = request.request();
  const completion = request.completion();
  if (requestType.type == 'Http') {
    try {
      let headers = opts.headers;
      if (requestType.headers != null && requestType.headers.length > 0) {
        headers = { ...opts.headers };
        for (let header of requestType.headers) {
          headers[header[0]] = header[1];
        }
      }
      const response = await fetch(`${opts.url}${requestType.path}`, {
        method: requestType.method,
        headers: headers,
        body: requestType.body != null ? new Uint8Array(requestType.body) : null,
      });
      completion.status(response.status);
      const reader = response.body.getReader();
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          completion.done();
          break;
        }
        completion.push(value);
      }
    } catch (error) {
      completion.poison(`fetch error: ${error}`);
    }
  } else if (requestType.type == 'FullRead') {
    try {
      const metadata = await read(opts.metadata, requestType.path);
      if (metadata != null) {
        completion.push(metadata);
      }
      completion.done();
    } catch (error) {
      completion.poison(`metadata read error: ${error}`);
    }
  } else if (requestType.type == 'FullWrite') {
    try {
      await write(opts.metadata, requestType.path, requestType.content);
      completion.done();
    } catch (error) {
      completion.poison(`metadata write error: ${error}`);
    }
  }
}

async function run(opts, engine, generator): Promise<any> {
  let tasks = [];
  while (generator.resume(null) !== GENERATOR_RESUME_DONE) {
    for (let request = engine.protocolIo(); request != null; request = engine.protocolIo()) {
      tasks.push(trackPromise(process(opts, request)));
    }

    const tasksRace = tasks.length == 0 ? Promise.resolve() : Promise.race([timeoutMs(opts.preemptionMs), ...tasks.map(t => t.promise)]);
    await Promise.all([engine.ioLoopAsync(), tasksRace]);

    tasks = tasks.filter(t => !t.finished);
  }
  return generator.take();
}

interface ConnectOpts {
  path: string;
  clientName?: string;
  url: string;
  authToken?: string;
  encryptionKey?: string;
  tablesIgnore?: string[];
  transform?: (arg: DatabaseRowMutationJs) => DatabaseRowStatementJs | null;
  enableTracing?: string;
}

interface Sync {
  sync(): Promise<void>;
  push(): Promise<void>;
  pull(): Promise<void>;
  checkpoint(): Promise<void>;
  stats(): Promise<{ operations: number, wal: number }>;
}

export async function connect(opts: ConnectOpts): Promise<Database & Sync> {
  const engine = new SyncEngine({
    path: opts.path,
    clientName: opts.clientName,
    tablesIgnore: opts.tablesIgnore,
    transform: opts.transform,
    enableTracing: opts.enableTracing
  });
  const httpOpts = {
    url: opts.url,
    headers: {
      ...(opts.authToken != null && { "Authorization": `Bearer ${opts.authToken}` }),
      ...(opts.encryptionKey != null && { "x-turso-encryption-key": opts.encryptionKey })
    },
    metadata: opts.path == ':memory:' ? { isMemory: true, value: null } : { isMemory: false },
    preemptionMs: 1,
  };
  await run(httpOpts, engine, engine.init());
  const nativeDb = engine.open();
  const db = Database.create();
  db.initialize(nativeDb, opts.path, false);
  db.sync = async function () { await run(httpOpts, engine, engine.sync()); }
  db.pull = async function () { await run(httpOpts, engine, engine.pull()); }
  db.push = async function () { await run(httpOpts, engine, engine.push()); }
  db.checkpoint = async function () { await run(httpOpts, engine, engine.checkpoint()); }
  db.stats = async function () { return (await run(httpOpts, engine, engine.stats())); }
  return db;
}

export { Database, Sync };
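
// Usage sketch (editorial; the path, URL, and token values are illustrative,
// not taken from the diff):
//
//   import { connect } from '@tursodatabase/sync';
//
//   const db = await connect({ path: 'local.db', url: 'https://example.turso.io', authToken: '...' });
//   await db.sync();
//   const { operations, wal } = await db.stats();
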
@@ -1,120 +0,0 @@
/* eslint-disable */
/* prettier-ignore */

/* auto-generated by NAPI-RS */

const __nodeFs = require('node:fs')
const __nodePath = require('node:path')
const { WASI: __nodeWASI } = require('node:wasi')
const { Worker } = require('node:worker_threads')

const {
  createOnMessage: __wasmCreateOnMessageForFsProxy,
  getDefaultContext: __emnapiGetDefaultContext,
  instantiateNapiModuleSync: __emnapiInstantiateNapiModuleSync,
} = require('@napi-rs/wasm-runtime')

const __rootDir = __nodePath.parse(process.cwd()).root

const __wasi = new __nodeWASI({
  version: 'preview1',
  env: process.env,
  preopens: {
    [__rootDir]: __rootDir,
  }
})

const __emnapiContext = __emnapiGetDefaultContext()

const __sharedMemory = new WebAssembly.Memory({
  initial: 4000,
  maximum: 65536,
  shared: true,
})

let __wasmFilePath = __nodePath.join(__dirname, 'turso-sync-js.wasm32-wasi.wasm')
const __wasmDebugFilePath = __nodePath.join(__dirname, 'turso-sync-js.wasm32-wasi.debug.wasm')

if (__nodeFs.existsSync(__wasmDebugFilePath)) {
  __wasmFilePath = __wasmDebugFilePath
} else if (!__nodeFs.existsSync(__wasmFilePath)) {
  try {
    __wasmFilePath = __nodePath.resolve('@tursodatabase/sync-wasm32-wasi')
  } catch {
    throw new Error('Cannot find turso-sync-js.wasm32-wasi.wasm file, and @tursodatabase/sync-wasm32-wasi package is not installed.')
  }
}

const { instance: __napiInstance, module: __wasiModule, napiModule: __napiModule } = __emnapiInstantiateNapiModuleSync(__nodeFs.readFileSync(__wasmFilePath), {
  context: __emnapiContext,
  asyncWorkPoolSize: (function() {
    const threadsSizeFromEnv = Number(process.env.NAPI_RS_ASYNC_WORK_POOL_SIZE ?? process.env.UV_THREADPOOL_SIZE)
    // NaN > 0 is false
    if (threadsSizeFromEnv > 0) {
      return threadsSizeFromEnv
    } else {
      return 4
    }
  })(),
  reuseWorker: true,
  wasi: __wasi,
  onCreateWorker() {
    const worker = new Worker(__nodePath.join(__dirname, 'wasi-worker.mjs'), {
      env: process.env,
    })
    worker.onmessage = ({ data }) => {
      __wasmCreateOnMessageForFsProxy(__nodeFs)(data)
    }

    // The main thread of Node.js waits for all the active handles before exiting.
    // But Rust threads are never waited without `thread::join`.
    // So here we hack the code of Node.js to prevent the workers from being referenced (active).
    // According to https://github.com/nodejs/node/blob/19e0d472728c79d418b74bddff588bea70a403d0/lib/internal/worker.js#L415,
    // a worker consists of two handles: kPublicPort and kHandle.
    {
      const kPublicPort = Object.getOwnPropertySymbols(worker).find(s =>
        s.toString().includes("kPublicPort")
      );
      if (kPublicPort) {
        worker[kPublicPort].ref = () => {};
      }

      const kHandle = Object.getOwnPropertySymbols(worker).find(s =>
        s.toString().includes("kHandle")
      );
      if (kHandle) {
        worker[kHandle].ref = () => {};
      }

      worker.unref();
    }
    return worker
  },
  overwriteImports(importObject) {
    importObject.env = {
      ...importObject.env,
      ...importObject.napi,
      ...importObject.emnapi,
      memory: __sharedMemory,
    }
    return importObject
  },
  beforeInit({ instance }) {
    for (const name of Object.keys(instance.exports)) {
      if (name.startsWith('__napi_register__')) {
        instance.exports[name]()
      }
    }
  },
})
module.exports = __napiModule.exports
module.exports.Database = __napiModule.exports.Database
module.exports.Statement = __napiModule.exports.Statement
module.exports.GeneratorHolder = __napiModule.exports.GeneratorHolder
module.exports.JsDataCompletion = __napiModule.exports.JsDataCompletion
module.exports.JsDataPollResult = __napiModule.exports.JsDataPollResult
module.exports.JsProtocolIo = __napiModule.exports.JsProtocolIo
module.exports.JsProtocolRequestData = __napiModule.exports.JsProtocolRequestData
module.exports.SyncEngine = __napiModule.exports.SyncEngine
module.exports.DatabaseChangeTypeJs = __napiModule.exports.DatabaseChangeTypeJs
module.exports.SyncEngineProtocolVersion = __napiModule.exports.SyncEngineProtocolVersion
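
// Note (editorial): the WASI build runs the wasm module against a shared
// WebAssembly.Memory and a pool of worker threads; the pool size is read from
// NAPI_RS_ASYNC_WORK_POOL_SIZE or UV_THREADPOOL_SIZE, and non-numeric values
// fall back to 4 because `NaN > 0` is false.
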
@@ -1,63 +0,0 @@
import fs from "node:fs";
import { createRequire } from "node:module";
import { parse } from "node:path";
import { WASI } from "node:wasi";
import { parentPort, Worker } from "node:worker_threads";

const require = createRequire(import.meta.url);

const { instantiateNapiModuleSync, MessageHandler, getDefaultContext } = require("@napi-rs/wasm-runtime");

if (parentPort) {
  parentPort.on("message", (data) => {
    globalThis.onmessage({ data });
  });
}

Object.assign(globalThis, {
  self: globalThis,
  require,
  Worker,
  importScripts: function (f) {
    ;(0, eval)(fs.readFileSync(f, "utf8") + "//# sourceURL=" + f);
  },
  postMessage: function (msg) {
    if (parentPort) {
      parentPort.postMessage(msg);
    }
  },
});

const emnapiContext = getDefaultContext();

const __rootDir = parse(process.cwd()).root;

const handler = new MessageHandler({
  onLoad({ wasmModule, wasmMemory }) {
    const wasi = new WASI({
      version: 'preview1',
      env: process.env,
      preopens: {
        [__rootDir]: __rootDir,
      },
    });

    return instantiateNapiModuleSync(wasmModule, {
      childThread: true,
      wasi,
      context: emnapiContext,
      overwriteImports(importObject) {
        importObject.env = {
          ...importObject.env,
          ...importObject.napi,
          ...importObject.emnapi,
          memory: wasmMemory
        };
      },
    });
  },
});

globalThis.onmessage = function (e) {
  handler.handle(e);
};
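
// Note (editorial): this worker shims the web-worker surface (self, onmessage,
// postMessage, importScripts) on top of node:worker_threads, so the
// @napi-rs/wasm-runtime message protocol works unchanged in Node.
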
File diff suppressed because it is too large