diff --git a/bindings/javascript/src/lib.rs b/bindings/javascript/src/lib.rs
index 4a8b5c3ab..dcaaefc5b 100644
--- a/bindings/javascript/src/lib.rs
+++ b/bindings/javascript/src/lib.rs
@@ -180,7 +180,7 @@ fn connect_sync(db: &DatabaseInner) -> napi::Result<()> {
         .open_file(&db.path, flags, false)
         .map_err(|e| to_generic_error("failed to open file", e))?;
 
-    let db_file = Arc::new(DatabaseFile::new(file));
+    let db_file = DatabaseFile::new(file);
     let db_core = turso_core::Database::open_with_flags(
         io.clone(),
         &db.path,
diff --git a/core/lib.rs b/core/lib.rs
index 557fbabb5..5e0288670 100644
--- a/core/lib.rs
+++ b/core/lib.rs
@@ -206,7 +206,7 @@ static DATABASE_MANAGER: LazyLock>>> =
 pub struct Database {
     mv_store: Option>,
     schema: Mutex>,
-    db_file: Arc<dyn DatabaseStorage>,
+    db_file: DatabaseFile,
     path: String,
     wal_path: String,
     pub io: Arc<dyn IO>,
@@ -305,7 +305,7 @@ impl Database {
         encryption_opts: Option<EncryptionOpts>,
     ) -> Result<Arc<Database>> {
         let file = io.open_file(path, flags, true)?;
-        let db_file = Arc::new(DatabaseFile::new(file));
+        let db_file = DatabaseFile::new(file);
         Self::open_with_flags(io, path, db_file, flags, opts, encryption_opts)
     }
 
@@ -313,7 +313,7 @@ impl Database {
     pub fn open(
         io: Arc<dyn IO>,
         path: &str,
-        db_file: Arc<dyn DatabaseStorage>,
+        db_file: DatabaseFile,
         enable_mvcc: bool,
         enable_indexes: bool,
     ) -> Result<Arc<Database>> {
@@ -333,7 +333,7 @@ impl Database {
     pub fn open_with_flags(
         io: Arc<dyn IO>,
         path: &str,
-        db_file: Arc<dyn DatabaseStorage>,
+        db_file: DatabaseFile,
         flags: OpenFlags,
         opts: DatabaseOpts,
         encryption_opts: Option<EncryptionOpts>,
@@ -381,7 +381,7 @@ impl Database {
         io: Arc<dyn IO>,
         path: &str,
         wal_path: &str,
-        db_file: Arc<dyn DatabaseStorage>,
+        db_file: DatabaseFile,
         flags: OpenFlags,
         opts: DatabaseOpts,
         encryption_opts: Option<EncryptionOpts>,
@@ -402,7 +402,7 @@ impl Database {
         io: Arc<dyn IO>,
         path: &str,
         wal_path: &str,
-        db_file: Arc<dyn DatabaseStorage>,
+        db_file: DatabaseFile,
         flags: OpenFlags,
         opts: DatabaseOpts,
         encryption_opts: Option<EncryptionOpts>,
diff --git a/core/storage/btree.rs b/core/storage/btree.rs
index 0f439f2f3..0dfaff8c1 100644
--- a/core/storage/btree.rs
+++ b/core/storage/btree.rs
@@ -7825,7 +7825,8 @@ mod tests {
         },
         types::Text,
         vdbe::Register,
-        BufferPool, Completion, Connection, IOContext, StepResult, WalFile, WalFileShared,
+        BufferPool, Completion, Connection, DatabaseStorage, IOContext, StepResult, WalFile,
+        WalFileShared,
     };
     use std::{
         cell::RefCell,
@@ -9094,9 +9095,8 @@ mod tests {
         let io: Arc<dyn IO> = Arc::new(MemoryIO::new());
         let buffer_pool = BufferPool::begin_init(&io, page_size * 128);
 
-        let db_file = Arc::new(DatabaseFile::new(
-            io.open_file(":memory:", OpenFlags::Create, false).unwrap(),
-        ));
+        let db_file =
+            DatabaseFile::new(io.open_file(":memory:", OpenFlags::Create, false).unwrap());
 
         let wal_file = io.open_file("test.wal", OpenFlags::Create, false).unwrap();
         let wal_shared = WalFileShared::new_shared(wal_file).unwrap();
diff --git a/core/storage/database.rs b/core/storage/database.rs
index 79b7d0a7a..7f8fc048e 100644
--- a/core/storage/database.rs
+++ b/core/storage/database.rs
@@ -87,12 +87,11 @@ pub trait DatabaseStorage: Send + Sync {
     fn truncate(&self, len: usize, c: Completion) -> Result<Completion>;
 }
 
-#[cfg(feature = "fs")]
+#[derive(Clone)]
 pub struct DatabaseFile {
     file: Arc<dyn File>,
 }
 
-#[cfg(feature = "fs")]
 impl DatabaseStorage for DatabaseFile {
     #[instrument(skip_all, level = Level::DEBUG)]
     fn read_header(&self, c: Completion) -> Result<Completion> {
diff --git a/core/storage/pager.rs b/core/storage/pager.rs
index 863a90279..bf11ef1d3 100644
--- a/core/storage/pager.rs
+++ b/core/storage/pager.rs
@@ -1,3 +1,4 @@
+use crate::storage::database::DatabaseFile;
 use crate::storage::wal::IOV_MAX;
 use crate::storage::{
     buffer_pool::BufferPool,
@@ -505,7 +506,7 @@ enum BtreeCreateVacuumFullState {
 /// transaction management.
 pub struct Pager {
     /// Source of the database pages.
-    pub db_file: Arc<dyn DatabaseStorage>,
+    pub db_file: DatabaseFile,
     /// The write-ahead log (WAL) for the database.
     /// in-memory databases, ephemeral tables and ephemeral indexes do not have a WAL.
     pub(crate) wal: Option>>,
@@ -607,7 +608,7 @@ enum FreePageState {
 
 impl Pager {
     pub fn new(
-        db_file: Arc<dyn DatabaseStorage>,
+        db_file: DatabaseFile,
         wal: Option>>,
         io: Arc<dyn IO>,
         page_cache: Arc>,
@@ -2785,9 +2786,8 @@ mod ptrmap_tests {
     // Helper to create a Pager for testing
     fn test_pager_setup(page_size: u32, initial_db_pages: u32) -> Pager {
         let io: Arc<dyn IO> = Arc::new(MemoryIO::new());
-        let db_file: Arc<dyn DatabaseStorage> = Arc::new(DatabaseFile::new(
-            io.open_file("test.db", OpenFlags::Create, true).unwrap(),
-        ));
+        let db_file: DatabaseFile =
+            DatabaseFile::new(io.open_file("test.db", OpenFlags::Create, true).unwrap());
 
         // Construct interfaces for the pager
         let pages = initial_db_pages + 10;
diff --git a/core/storage/sqlite3_ondisk.rs b/core/storage/sqlite3_ondisk.rs
index 2611d0f4a..a08483c66 100644
--- a/core/storage/sqlite3_ondisk.rs
+++ b/core/storage/sqlite3_ondisk.rs
@@ -58,7 +58,7 @@ use crate::storage::btree::offset::{
 };
 use crate::storage::btree::{payload_overflow_threshold_max, payload_overflow_threshold_min};
 use crate::storage::buffer_pool::BufferPool;
-use crate::storage::database::{DatabaseStorage, EncryptionOrChecksum};
+use crate::storage::database::{DatabaseFile, DatabaseStorage, EncryptionOrChecksum};
 use crate::storage::pager::Pager;
 use crate::storage::wal::READMARK_NOT_USED;
 use crate::types::{RawSlice, RefValue, SerialType, SerialTypeKind, TextRef, TextSubtype};
@@ -899,7 +899,7 @@ impl PageContent {
 /// if allow_empty_read is set, than empty read will be raise error for the page, but will not panic
 #[instrument(skip_all, level = Level::DEBUG)]
 pub fn begin_read_page(
-    db_file: Arc<dyn DatabaseStorage>,
+    db_file: DatabaseFile,
     buffer_pool: Arc<BufferPool>,
     page: PageRef,
     page_idx: usize,
@@ -1076,10 +1076,7 @@ pub fn write_pages_vectored(
 }
 
 #[instrument(skip_all, level = Level::DEBUG)]
-pub fn begin_sync(
-    db_file: Arc<dyn DatabaseStorage>,
-    syncing: Arc<AtomicBool>,
-) -> Result<Completion> {
+pub fn begin_sync(db_file: DatabaseFile, syncing: Arc<AtomicBool>) -> Result<Completion> {
     assert!(!syncing.load(Ordering::SeqCst));
     syncing.store(true, Ordering::SeqCst);
     let completion = Completion::new_sync(move |_| {
diff --git a/core/vdbe/execute.rs b/core/vdbe/execute.rs
index 431f09365..3ef68926c 100644
--- a/core/vdbe/execute.rs
+++ b/core/vdbe/execute.rs
@@ -7376,7 +7376,7 @@ pub fn op_open_ephemeral(
         db_file_io = Arc::new(MemoryIO::new());
         let file = db_file_io.open_file("temp-file", OpenFlags::Create, false)?;
 
-        db_file = Arc::new(DatabaseFile::new(file));
+        db_file = DatabaseFile::new(file);
     }
     #[cfg(not(target_family = "wasm"))]
     {
@@ -7389,7 +7389,7 @@ pub fn op_open_ephemeral(
             ));
         };
         let file = io.open_file(rand_path_str, OpenFlags::Create, false)?;
-        db_file = Arc::new(DatabaseFile::new(file));
+        db_file = DatabaseFile::new(file);
         db_file_io = io;
     }
 
diff --git a/sync/engine/src/database_sync_engine.rs b/sync/engine/src/database_sync_engine.rs
index 0b37b52c1..54f6c91a3 100644
--- a/sync/engine/src/database_sync_engine.rs
+++ b/sync/engine/src/database_sync_engine.rs
@@ -45,7 +45,7 @@ pub struct DatabaseSyncEngineOpts {
 pub struct DatabaseSyncEngine {
     io: Arc,
     protocol: Arc,
-    db_file: Arc<dyn DatabaseStorage>,
+    db_file: turso_core::storage::database::DatabaseFile,
     main_tape: DatabaseTape,
     main_db_wal_path: String,
     revert_db_wal_path: String,
@@ -156,7 +156,7 @@ impl DatabaseSyncEngine {
         }
 
         let db_file = io.open_file(main_db_path, turso_core::OpenFlags::Create, false)?;
-        let db_file = Arc::new(turso_core::storage::database::DatabaseFile::new(db_file));
+        let db_file = turso_core::storage::database::DatabaseFile::new(db_file);
 
         let main_db = turso_core::Database::open_with_flags(
             io.clone(),
diff --git a/tests/integration/functions/test_wal_api.rs b/tests/integration/functions/test_wal_api.rs
index 20f4ea6aa..72855f2ed 100644
--- a/tests/integration/functions/test_wal_api.rs
+++ b/tests/integration/functions/test_wal_api.rs
@@ -864,7 +864,7 @@ fn test_db_share_same_file() {
     let db_file = io
         .open_file(path.to_str().unwrap(), turso_core::OpenFlags::Create, false)
         .unwrap();
-    let db_file = Arc::new(turso_core::storage::database::DatabaseFile::new(db_file));
+    let db_file = turso_core::storage::database::DatabaseFile::new(db_file);
     let db1 = turso_core::Database::open_with_flags(
         io.clone(),
         path.to_str().unwrap(),
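Not part of the patch: a minimal, self-contained sketch of the ownership pattern the change relies on, assuming only what the hunks above show, namely that `DatabaseFile` now derives `Clone` and wraps an `Arc<dyn File>`, so callers can take it by value (and clone it cheaply) instead of threading an `Arc<dyn DatabaseStorage>` around. The `File` trait, `MemFile`, and the function names below are hypothetical stand-ins for illustration, not turso_core's actual API.

```rust
use std::sync::Arc;

// Hypothetical stand-in for the file trait object that the wrapper holds.
trait File: Send + Sync {
    fn size(&self) -> usize;
}

struct MemFile;

impl File for MemFile {
    fn size(&self) -> usize {
        0
    }
}

// Same shape the patch gives `DatabaseFile`: a `Clone` newtype over an
// `Arc<dyn File>`, so cloning only bumps the reference count.
#[derive(Clone)]
struct DatabaseFile {
    file: Arc<dyn File>,
}

impl DatabaseFile {
    fn new(file: Arc<dyn File>) -> Self {
        Self { file }
    }
}

// Callers can take the handle by value rather than as `Arc<dyn Trait>`.
fn page_source_len(db_file: DatabaseFile) -> usize {
    db_file.file.size()
}

fn main() {
    let db_file = DatabaseFile::new(Arc::new(MemFile));
    // Cheap clone: both handles share the same underlying file object.
    let for_pager = db_file.clone();
    assert_eq!(page_source_len(for_pager), db_file.file.size());
}
```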