Merge pull request #787 from thesimplekid/remove_redb_mint

refactor: remove redb mint database
thesimplekid authored 2025-06-21 11:54:12 +01:00; committed by GitHub
15 changed files with 13 additions and 808 deletions

View File

@@ -123,8 +123,7 @@ jobs:
--bin cdk-cli --features "sqlcipher redb",
--bin cdk-mintd,
--bin cdk-mintd --features redis,
--bin cdk-mintd --features redb,
--bin cdk-mintd --features "redis swagger redb",
--bin cdk-mintd --features "redis swagger",
--bin cdk-mintd --features sqlcipher,
--bin cdk-mintd --no-default-features --features lnd,
--bin cdk-mintd --no-default-features --features cln,
@@ -167,7 +166,6 @@ jobs:
]
database:
[
REDB,
SQLITE,
]
steps:
@@ -193,7 +191,6 @@ jobs:
]
database:
[
REDB,
SQLITE,
]
steps:
@@ -358,7 +355,6 @@ jobs:
matrix:
database:
[
REDB,
SQLITE,
]
steps:
@@ -410,4 +406,4 @@ jobs:
- name: Rust Cache
uses: Swatinem/rust-cache@v2
- name: Check docs with strict warnings
run: nix develop -i -L .#stable --command just docs-strict
run: nix develop -i -L .#stable --command just docs-strict

View File

@@ -16,6 +16,8 @@
- Docker build workflow for arm64 images [PR](https://github.com/cashubtc/cdk/pull/770) ([asmo]).
### Changed
- cdk-redb: Removed mint storage functionality; the crate is now wallet-only ([thesimplekid]).
- Updated Nix flake to 25.05 and removed Nix cache [PR](https://github.com/cashubtc/cdk/pull/769) ([thesimplekid]).
- Updated dependencies [PR](https://github.com/cashubtc/cdk/pull/761) ([thesimplekid]).
- Refactored NUT-04 and NUT-05 [PR](https://github.com/cashubtc/cdk/pull/749) ([thesimplekid]).

View File

@@ -174,7 +174,10 @@ pub async fn create_and_start_test_mint() -> Result<Mint> {
let db_type = env::var("CDK_TEST_DB_TYPE").expect("Database type set");
let mut mint_builder = match db_type.to_lowercase().as_str() {
"sqlite" => {
"memory" => MintBuilder::new()
.with_localstore(Arc::new(cdk_sqlite::mint::memory::empty().await?))
.with_keystore(Arc::new(cdk_sqlite::mint::memory::empty().await?)),
_ => {
// Create a temporary directory for SQLite database
let temp_dir = create_temp_dir("cdk-test-sqlite-mint")?;
let path = temp_dir.join("mint.db").to_str().unwrap().to_string();
@@ -187,24 +190,6 @@ pub async fn create_and_start_test_mint() -> Result<Mint> {
.with_localstore(database.clone())
.with_keystore(database)
}
"redb" => {
// Create a temporary directory for ReDB database
let temp_dir = create_temp_dir("cdk-test-redb-mint")?;
let path = temp_dir.join("mint.redb");
let database = Arc::new(
cdk_redb::MintRedbDatabase::new(&path)
.expect("Could not create redb mint database"),
);
MintBuilder::new()
.with_localstore(database.clone())
.with_keystore(database)
}
"memory" => MintBuilder::new()
.with_localstore(Arc::new(cdk_sqlite::mint::memory::empty().await?))
.with_keystore(Arc::new(cdk_sqlite::mint::memory::empty().await?)),
_ => {
bail!("Db type not set")
}
};
let fee_reserve = FeeReserve {

View File

@@ -21,7 +21,6 @@ fakewallet = ["dep:cdk-fake-wallet"]
grpc-processor = ["dep:cdk-payment-processor", "cdk-signatory/grpc"]
sqlcipher = ["cdk-sqlite/sqlcipher"]
# MSRV is not committed to with redb enabled
redb = ["dep:cdk-redb"]
swagger = ["cdk-axum/swagger", "dep:utoipa", "dep:utoipa-swagger-ui"]
redis = ["cdk-axum/redis"]
auth = ["cdk/auth", "cdk-sqlite/auth"]
@@ -33,10 +32,6 @@ axum.workspace = true
cdk = { workspace = true, features = [
"mint",
] }
cdk-redb = { workspace = true, features = [
"mint",
"auth"
], optional = true }
cdk-sqlite = { workspace = true, features = [
"mint",
] }

View File

@@ -190,8 +190,6 @@ pub struct GrpcProcessor {
pub enum DatabaseEngine {
#[default]
Sqlite,
#[cfg(feature = "redb")]
Redb,
}
impl std::str::FromStr for DatabaseEngine {
@@ -200,8 +198,6 @@ impl std::str::FromStr for DatabaseEngine {
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"sqlite" => Ok(DatabaseEngine::Sqlite),
#[cfg(feature = "redb")]
"redb" => Ok(DatabaseEngine::Redb),
_ => Err(format!("Unknown database engine: {s}")),
}
}

View File

@@ -42,10 +42,6 @@ use cdk_mintd::cli::CLIArgs;
use cdk_mintd::config::{self, DatabaseEngine, LnBackend};
use cdk_mintd::env_vars::ENV_WORK_DIR;
use cdk_mintd::setup::LnBackendSetup;
#[cfg(feature = "redb")]
use cdk_redb::mint::MintRedbAuthDatabase;
#[cfg(feature = "redb")]
use cdk_redb::MintRedbDatabase;
use cdk_sqlite::mint::MintSqliteAuthDatabase;
use cdk_sqlite::MintSqliteDatabase;
use clap::Parser;
@@ -131,14 +127,6 @@ async fn main() -> anyhow::Result<()> {
.with_localstore(db.clone())
.with_keystore(db)
}
#[cfg(feature = "redb")]
DatabaseEngine::Redb => {
let redb_path = work_dir.join("cdk-mintd.redb");
let db = Arc::new(MintRedbDatabase::new(&redb_path)?);
MintBuilder::new()
.with_localstore(db.clone())
.with_keystore(db)
}
};
let mut contact_info: Option<Vec<ContactInfo>> = None;
@@ -413,11 +401,6 @@ async fn main() -> anyhow::Result<()> {
Arc::new(sqlite_db)
}
#[cfg(feature = "redb")]
DatabaseEngine::Redb => {
let redb_path = work_dir.join("cdk-mintd-auth.redb");
Arc::new(MintRedbAuthDatabase::new(&redb_path)?)
}
};
mint_builder = mint_builder.with_auth_localstore(auth_localstore.clone());

View File

@@ -12,8 +12,7 @@ readme = "README.md"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = ["mint", "wallet", "auth"]
mint = ["cdk-common/mint"]
default = ["wallet", "auth"]
wallet = ["cdk-common/wallet"]
auth = ["cdk-common/auth"]

View File

@@ -6,13 +6,12 @@
**ALPHA** This library is in early development, the API will change and should be used with caution.
[Redb](https://github.com/cberner/redb) storage backend implementation for the Cashu Development Kit (CDK).
[Redb](https://github.com/cberner/redb) storage backend implementation for the Cashu Development Kit (CDK) wallet.
## Features
This crate provides a Redb-based storage implementation for:
This crate provides a Redb-based storage implementation for wallet functionality, including:
- Wallet storage
- Mint storage
- Proof tracking
- Transaction history
@@ -25,7 +24,6 @@ Add this to your `Cargo.toml`:
cdk-redb = "*"
```
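
A minimal sketch of opening the wallet store, assuming `WalletRedbDatabase::new` takes a filesystem path the same way the mint constructor removed in this PR did; the file name is illustrative, so check the crate docs for the exact signature:

```rust
use std::path::PathBuf;
use std::sync::Arc;

use cdk_redb::WalletRedbDatabase;

fn open_wallet_store() -> Result<Arc<WalletRedbDatabase>, cdk_redb::error::Error> {
    // Redb keeps the whole wallet store in a single file; the path is illustrative.
    let path = PathBuf::from("cdk-wallet.redb");
    let db = WalletRedbDatabase::new(&path)?;
    // The Arc can then be handed to the wallet as its local store.
    Ok(Arc::new(db))
}
```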
## License
This project is licensed under the [MIT License](../../LICENSE).

View File

@@ -7,12 +7,8 @@
pub mod error;
mod migrations;
#[cfg(feature = "mint")]
pub mod mint;
#[cfg(feature = "wallet")]
pub mod wallet;
#[cfg(feature = "mint")]
pub use mint::MintRedbDatabase;
#[cfg(feature = "wallet")]
pub use wallet::WalletRedbDatabase;

View File

@@ -1,391 +0,0 @@
use std::cmp::Ordering;
use std::collections::HashMap;
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use async_trait::async_trait;
use cdk_common::database::{self, MintAuthDatabase};
use cdk_common::dhke::hash_to_curve;
use cdk_common::mint::MintKeySetInfo;
use cdk_common::nuts::{AuthProof, BlindSignature, Id, PublicKey, State};
use cdk_common::{AuthRequired, ProtectedEndpoint};
use redb::{Database, ReadableTable, TableDefinition};
use crate::error::Error;
const CONFIG_TABLE: TableDefinition<&str, &str> = TableDefinition::new("config");
const ACTIVE_KEYSET_TABLE: TableDefinition<&str, &str> = TableDefinition::new("active_keyset");
const KEYSETS_TABLE: TableDefinition<&str, &str> = TableDefinition::new("keysets");
const PROOFS_TABLE: TableDefinition<[u8; 33], &str> = TableDefinition::new("proofs");
const PROOFS_STATE_TABLE: TableDefinition<[u8; 33], &str> = TableDefinition::new("proofs_state");
// Key is hex blinded_message B_ value is blinded_signature
const BLINDED_SIGNATURES: TableDefinition<[u8; 33], &str> =
TableDefinition::new("blinded_signatures");
const ENDPOINTS_TABLE: TableDefinition<&str, &str> = TableDefinition::new("endpoints");
/// Mint Redb auth database
#[derive(Debug, Clone)]
pub struct MintRedbAuthDatabase {
db: Arc<Database>,
}
const DATABASE_VERSION: u32 = 0;
impl MintRedbAuthDatabase {
/// Create new [`MintRedbAuthDatabase`]
pub fn new(path: &Path) -> Result<Self, Error> {
{
// Check database version
let db = Arc::new(Database::create(path)?);
// Check database version
let read_txn = db.begin_read()?;
let table = read_txn.open_table(CONFIG_TABLE);
let db_version = match table {
Ok(table) => table.get("db_version")?.map(|v| v.value().to_owned()),
Err(_) => None,
};
match db_version {
Some(db_version) => {
let current_file_version = u32::from_str(&db_version)?;
match current_file_version.cmp(&DATABASE_VERSION) {
Ordering::Less => {
tracing::info!(
"Database needs to be upgraded at {} current is {}",
current_file_version,
DATABASE_VERSION
);
}
Ordering::Equal => {
tracing::info!("Database is at current version {}", DATABASE_VERSION);
}
Ordering::Greater => {
tracing::warn!(
"Database upgrade did not complete at {} current is {}",
current_file_version,
DATABASE_VERSION
);
return Err(Error::UnknownDatabaseVersion);
}
}
}
None => {
let write_txn = db.begin_write()?;
{
let mut table = write_txn.open_table(CONFIG_TABLE)?;
// Open all tables to init a new db
let _ = write_txn.open_table(ACTIVE_KEYSET_TABLE)?;
let _ = write_txn.open_table(KEYSETS_TABLE)?;
let _ = write_txn.open_table(PROOFS_TABLE)?;
let _ = write_txn.open_table(PROOFS_STATE_TABLE)?;
let _ = write_txn.open_table(BLINDED_SIGNATURES)?;
table.insert("db_version", DATABASE_VERSION.to_string().as_str())?;
}
write_txn.commit()?;
}
}
drop(db);
}
let db = Database::create(path)?;
Ok(Self { db: Arc::new(db) })
}
}
#[async_trait]
impl MintAuthDatabase for MintRedbAuthDatabase {
type Err = database::Error;
async fn set_active_keyset(&self, id: Id) -> Result<(), Self::Err> {
let write_txn = self.db.begin_write().map_err(Error::from)?;
{
let mut table = write_txn
.open_table(ACTIVE_KEYSET_TABLE)
.map_err(Error::from)?;
table
.insert("active_keyset_id", id.to_string().as_str())
.map_err(Error::from)?;
}
write_txn.commit().map_err(Error::from)?;
Ok(())
}
async fn get_active_keyset_id(&self) -> Result<Option<Id>, Self::Err> {
let read_txn = self.db.begin_read().map_err(Error::from)?;
let table = read_txn
.open_table(ACTIVE_KEYSET_TABLE)
.map_err(Error::from)?;
if let Some(id) = table.get("active_keyset_id").map_err(Error::from)? {
return Ok(Some(Id::from_str(id.value()).map_err(Error::from)?));
}
Ok(None)
}
async fn add_keyset_info(&self, keyset: MintKeySetInfo) -> Result<(), Self::Err> {
let write_txn = self.db.begin_write().map_err(Error::from)?;
{
let mut table = write_txn.open_table(KEYSETS_TABLE).map_err(Error::from)?;
table
.insert(
keyset.id.to_string().as_str(),
serde_json::to_string(&keyset)
.map_err(Error::from)?
.as_str(),
)
.map_err(Error::from)?;
}
write_txn.commit().map_err(Error::from)?;
Ok(())
}
async fn get_keyset_info(&self, keyset_id: &Id) -> Result<Option<MintKeySetInfo>, Self::Err> {
let read_txn = self.db.begin_read().map_err(Error::from)?;
let table = read_txn.open_table(KEYSETS_TABLE).map_err(Error::from)?;
match table
.get(keyset_id.to_string().as_str())
.map_err(Error::from)?
{
Some(keyset) => Ok(serde_json::from_str(keyset.value()).map_err(Error::from)?),
None => Ok(None),
}
}
async fn get_keyset_infos(&self) -> Result<Vec<MintKeySetInfo>, Self::Err> {
let read_txn = self.db.begin_read().map_err(Error::from)?;
let table = read_txn.open_table(KEYSETS_TABLE).map_err(Error::from)?;
let mut keysets = Vec::new();
for (_id, keyset) in (table.iter().map_err(Error::from)?).flatten() {
let keyset = serde_json::from_str(keyset.value()).map_err(Error::from)?;
keysets.push(keyset)
}
Ok(keysets)
}
async fn add_proof(&self, proof: AuthProof) -> Result<(), Self::Err> {
let write_txn = self.db.begin_write().map_err(Error::from)?;
{
let mut table = write_txn.open_table(PROOFS_TABLE).map_err(Error::from)?;
let y: PublicKey = hash_to_curve(&proof.secret.to_bytes()).map_err(Error::from)?;
let y = y.to_bytes();
if table.get(y).map_err(Error::from)?.is_none() {
table
.insert(
y,
serde_json::to_string(&proof).map_err(Error::from)?.as_str(),
)
.map_err(Error::from)?;
}
}
write_txn.commit().map_err(Error::from)?;
Ok(())
}
async fn update_proof_state(
&self,
y: &PublicKey,
proof_state: State,
) -> Result<Option<State>, Self::Err> {
let write_txn = self.db.begin_write().map_err(Error::from)?;
let state_str = serde_json::to_string(&proof_state).map_err(Error::from)?;
let current_state;
{
let mut table = write_txn
.open_table(PROOFS_STATE_TABLE)
.map_err(Error::from)?;
{
match table.get(y.to_bytes()).map_err(Error::from)? {
Some(state) => {
current_state =
Some(serde_json::from_str(state.value()).map_err(Error::from)?)
}
None => current_state = None,
}
}
if current_state != Some(State::Spent) {
table
.insert(y.to_bytes(), state_str.as_str())
.map_err(Error::from)?;
}
}
write_txn.commit().map_err(Error::from)?;
Ok(current_state)
}
async fn get_proofs_states(&self, ys: &[PublicKey]) -> Result<Vec<Option<State>>, Self::Err> {
let read_txn = self.db.begin_read().map_err(Error::from)?;
let table = read_txn
.open_table(PROOFS_STATE_TABLE)
.map_err(Error::from)?;
let mut states = Vec::with_capacity(ys.len());
for y in ys {
match table.get(y.to_bytes()).map_err(Error::from)? {
Some(state) => states.push(Some(
serde_json::from_str(state.value()).map_err(Error::from)?,
)),
None => states.push(None),
}
}
Ok(states)
}
async fn add_blind_signatures(
&self,
blinded_messages: &[PublicKey],
blind_signatures: &[BlindSignature],
) -> Result<(), Self::Err> {
let write_txn = self.db.begin_write().map_err(Error::from)?;
{
let mut table = write_txn
.open_table(BLINDED_SIGNATURES)
.map_err(Error::from)?;
for (blinded_message, blind_signature) in blinded_messages.iter().zip(blind_signatures)
{
let blind_sig = serde_json::to_string(&blind_signature).map_err(Error::from)?;
table
.insert(blinded_message.to_bytes(), blind_sig.as_str())
.map_err(Error::from)?;
}
}
write_txn.commit().map_err(Error::from)?;
Ok(())
}
async fn get_blind_signatures(
&self,
blinded_messages: &[PublicKey],
) -> Result<Vec<Option<BlindSignature>>, Self::Err> {
let read_txn = self.db.begin_read().map_err(Error::from)?;
let table = read_txn
.open_table(BLINDED_SIGNATURES)
.map_err(Error::from)?;
let mut signatures = Vec::with_capacity(blinded_messages.len());
for blinded_message in blinded_messages {
match table.get(blinded_message.to_bytes()).map_err(Error::from)? {
Some(blind_signature) => signatures.push(Some(
serde_json::from_str(blind_signature.value()).map_err(Error::from)?,
)),
None => signatures.push(None),
}
}
Ok(signatures)
}
async fn add_protected_endpoints(
&self,
protected_endpoints: HashMap<ProtectedEndpoint, AuthRequired>,
) -> Result<(), Self::Err> {
let write_txn = self.db.begin_write().map_err(Error::from)?;
{
let mut table = write_txn.open_table(ENDPOINTS_TABLE).map_err(Error::from)?;
for (endpoint, auth) in protected_endpoints.iter() {
table
.insert(
serde_json::to_string(endpoint)
.map_err(Error::from)?
.as_str(),
serde_json::to_string(&auth).map_err(Error::from)?.as_str(),
)
.map_err(Error::from)?;
}
}
write_txn.commit().map_err(Error::from)?;
Ok(())
}
async fn remove_protected_endpoints(
&self,
protected_endpoints: Vec<ProtectedEndpoint>,
) -> Result<(), Self::Err> {
let write_txn = self.db.begin_write().map_err(Error::from)?;
{
let mut table = write_txn.open_table(ENDPOINTS_TABLE).map_err(Error::from)?;
for endpoint in protected_endpoints.iter() {
table
.remove(
serde_json::to_string(endpoint)
.map_err(Error::from)?
.as_str(),
)
.map_err(Error::from)?;
}
}
write_txn.commit().map_err(Error::from)?;
Ok(())
}
async fn get_auth_for_endpoint(
&self,
protected_endpoint: ProtectedEndpoint,
) -> Result<Option<AuthRequired>, Self::Err> {
let read_txn = self.db.begin_read().map_err(Error::from)?;
let table = read_txn.open_table(ENDPOINTS_TABLE).map_err(Error::from)?;
match table
.get(
serde_json::to_string(&protected_endpoint)
.map_err(Error::from)?
.as_str(),
)
.map_err(Error::from)?
{
Some(auth) => Ok(serde_json::from_str(auth.value()).map_err(Error::from)?),
None => Ok(None),
}
}
async fn get_auth_for_endpoints(
&self,
) -> Result<HashMap<ProtectedEndpoint, Option<AuthRequired>>, Self::Err> {
let read_txn = self.db.begin_read().map_err(Error::from)?;
let table = read_txn.open_table(ENDPOINTS_TABLE).map_err(Error::from)?;
let mut protected = HashMap::new();
for (endpoint, auth) in (table.iter().map_err(Error::from)?).flatten() {
let endpoint: ProtectedEndpoint =
serde_json::from_str(endpoint.value()).map_err(Error::from)?;
let auth: AuthRequired = serde_json::from_str(auth.value()).map_err(Error::from)?;
protected.insert(endpoint, Some(auth));
}
Ok(protected)
}
}

View File

@@ -1,338 +0,0 @@
use core::str;
use std::collections::HashMap;
use std::str::FromStr;
use std::sync::Arc;
use cdk_common::mint::MintQuote;
use cdk_common::mint_url::MintUrl;
use cdk_common::util::unix_time;
use cdk_common::{Amount, CurrencyUnit, MintQuoteState, Proof, State};
use lightning_invoice::Bolt11Invoice;
use redb::{
Database, MultimapTableDefinition, ReadableMultimapTable, ReadableTable, TableDefinition,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use super::{Error, PROOFS_STATE_TABLE, PROOFS_TABLE, QUOTE_SIGNATURES_TABLE};
const ID_STR_MINT_QUOTES_TABLE: TableDefinition<&str, &str> = TableDefinition::new("mint_quotes");
const PENDING_PROOFS_TABLE: TableDefinition<[u8; 33], &str> =
TableDefinition::new("pending_proofs");
const SPENT_PROOFS_TABLE: TableDefinition<[u8; 33], &str> = TableDefinition::new("spent_proofs");
const QUOTE_PROOFS_TABLE: MultimapTableDefinition<&str, [u8; 33]> =
MultimapTableDefinition::new("quote_proofs");
pub fn migrate_01_to_02(db: Arc<Database>) -> Result<u32, Error> {
migrate_mint_quotes_01_to_02(db)?;
Ok(2)
}
pub fn migrate_02_to_03(db: Arc<Database>) -> Result<u32, Error> {
migrate_mint_proofs_02_to_03(db)?;
Ok(3)
}
pub fn migrate_03_to_04(db: Arc<Database>) -> Result<u32, Error> {
let write_txn = db.begin_write()?;
let _ = write_txn.open_multimap_table(QUOTE_PROOFS_TABLE)?;
let _ = write_txn.open_multimap_table(QUOTE_SIGNATURES_TABLE)?;
Ok(4)
}
pub fn migrate_04_to_05(db: Arc<Database>) -> Result<u32, Error> {
let write_txn = db.begin_write()?;
// Mint quotes
{
const MINT_QUOTE_TABLE_NAME: &str = "mint_quotes";
const OLD_TABLE: TableDefinition<&str, &str> = TableDefinition::new(MINT_QUOTE_TABLE_NAME);
const NEW_TABLE: TableDefinition<[u8; 16], &str> =
TableDefinition::new(MINT_QUOTE_TABLE_NAME);
let old_table = write_txn.open_table(OLD_TABLE)?;
let mut tmp_hashmap = HashMap::new();
for (k, v) in old_table.iter().map_err(Error::from)?.flatten() {
let quote_id = Uuid::try_parse(k.value()).unwrap();
tmp_hashmap.insert(quote_id, v.value().to_string());
}
write_txn.delete_table(old_table).map_err(Error::from)?;
let mut new_table = write_txn.open_table(NEW_TABLE)?;
for (k, v) in tmp_hashmap.into_iter() {
new_table
.insert(k.as_bytes(), v.as_str())
.map_err(Error::from)?;
}
}
// Melt quotes
{
const MELT_QUOTE_TABLE_NAME: &str = "melt_quotes";
const OLD_TABLE: TableDefinition<&str, &str> = TableDefinition::new(MELT_QUOTE_TABLE_NAME);
const NEW_TABLE: TableDefinition<[u8; 16], &str> =
TableDefinition::new(MELT_QUOTE_TABLE_NAME);
let old_table = write_txn.open_table(OLD_TABLE)?;
let mut tmp_hashmap = HashMap::new();
for (k, v) in old_table.iter().map_err(Error::from)?.flatten() {
let quote_id = Uuid::try_parse(k.value()).unwrap();
tmp_hashmap.insert(quote_id, v.value().to_string());
}
write_txn.delete_table(old_table).map_err(Error::from)?;
let mut new_table = write_txn.open_table(NEW_TABLE)?;
for (k, v) in tmp_hashmap.into_iter() {
new_table
.insert(k.as_bytes(), v.as_str())
.map_err(Error::from)?;
}
}
// Quote proofs
{
const QUOTE_PROOFS_TABLE_NAME: &str = "quote_proofs";
const OLD_TABLE: MultimapTableDefinition<&str, [u8; 33]> =
MultimapTableDefinition::new(QUOTE_PROOFS_TABLE_NAME);
const NEW_TABLE: MultimapTableDefinition<[u8; 16], [u8; 33]> =
MultimapTableDefinition::new(QUOTE_PROOFS_TABLE_NAME);
let old_table = write_txn.open_multimap_table(OLD_TABLE)?;
let mut tmp_hashmap = HashMap::new();
for (k, v) in old_table.iter().map_err(Error::from)?.flatten() {
let quote_id = Uuid::try_parse(k.value()).unwrap();
let ys: Vec<[u8; 33]> = v.into_iter().flatten().map(|v| v.value()).collect();
tmp_hashmap.insert(quote_id, ys);
}
write_txn
.delete_multimap_table(old_table)
.map_err(Error::from)?;
let mut new_table = write_txn.open_multimap_table(NEW_TABLE)?;
for (quote_id, blind_messages) in tmp_hashmap.into_iter() {
for blind_message in blind_messages {
new_table
.insert(quote_id.as_bytes(), blind_message)
.map_err(Error::from)?;
}
}
}
// Quote signatures
{
const QUOTE_SIGNATURES_TABLE_NAME: &str = "quote_signatures";
const OLD_TABLE: MultimapTableDefinition<&str, [u8; 33]> =
MultimapTableDefinition::new(QUOTE_SIGNATURES_TABLE_NAME);
const NEW_TABLE: MultimapTableDefinition<[u8; 16], [u8; 33]> =
MultimapTableDefinition::new(QUOTE_SIGNATURES_TABLE_NAME);
let old_table = write_txn.open_multimap_table(OLD_TABLE)?;
let mut tmp_hashmap = HashMap::new();
for (k, v) in old_table.iter().map_err(Error::from)?.flatten() {
let quote_id = Uuid::try_parse(k.value()).unwrap();
let ys: Vec<[u8; 33]> = v.into_iter().flatten().map(|v| v.value()).collect();
tmp_hashmap.insert(quote_id, ys);
}
write_txn
.delete_multimap_table(old_table)
.map_err(Error::from)?;
let mut new_table = write_txn.open_multimap_table(NEW_TABLE)?;
for (quote_id, signatures) in tmp_hashmap.into_iter() {
for signature in signatures {
new_table
.insert(quote_id.as_bytes(), signature)
.map_err(Error::from)?;
}
}
}
// Melt requests
{
const MELT_REQUESTS_TABLE_NAME: &str = "melt_requests";
const OLD_TABLE: TableDefinition<&str, (&str, &str)> =
TableDefinition::new(MELT_REQUESTS_TABLE_NAME);
const NEW_TABLE: TableDefinition<[u8; 16], (&str, &str)> =
TableDefinition::new(MELT_REQUESTS_TABLE_NAME);
let old_table = write_txn.open_table(OLD_TABLE)?;
let mut tmp_hashmap = HashMap::new();
for (k, v) in old_table.iter().map_err(Error::from)?.flatten() {
let quote_id = Uuid::try_parse(k.value()).unwrap();
let value = v.value();
tmp_hashmap.insert(quote_id, (value.0.to_string(), value.1.to_string()));
}
write_txn.delete_table(old_table).map_err(Error::from)?;
let mut new_table = write_txn.open_table(NEW_TABLE)?;
for (k, v) in tmp_hashmap.into_iter() {
new_table
.insert(k.as_bytes(), (v.0.as_str(), v.1.as_str()))
.map_err(Error::from)?;
}
}
write_txn.commit().map_err(Error::from)?;
Ok(5)
}
/// Mint Quote Info
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
struct V1MintQuote {
pub id: Uuid,
pub mint_url: MintUrl,
pub amount: Amount,
pub unit: CurrencyUnit,
pub request: String,
pub state: MintQuoteState,
pub expiry: u64,
}
impl From<V1MintQuote> for MintQuote {
fn from(quote: V1MintQuote) -> MintQuote {
MintQuote {
id: quote.id,
amount: quote.amount,
unit: quote.unit,
request: quote.request.clone(),
state: quote.state,
expiry: quote.expiry,
request_lookup_id: Bolt11Invoice::from_str(&quote.request).unwrap().to_string(),
pubkey: None,
created_time: unix_time(),
paid_time: None,
issued_time: None,
}
}
}
fn migrate_mint_quotes_01_to_02(db: Arc<Database>) -> Result<(), Error> {
let read_txn = db.begin_read().map_err(Error::from)?;
let table = read_txn
.open_table(ID_STR_MINT_QUOTES_TABLE)
.map_err(Error::from)?;
let mint_quotes: HashMap<String, Option<V1MintQuote>>;
{
mint_quotes = table
.iter()
.map_err(Error::from)?
.flatten()
.map(|(quote_id, mint_quote)| {
(
quote_id.value().to_string(),
serde_json::from_str(mint_quote.value()).ok(),
)
})
.collect();
}
let migrated_mint_quotes: HashMap<String, Option<MintQuote>> = mint_quotes
.into_iter()
.map(|(quote_id, quote)| (quote_id, quote.map(|q| q.into())))
.collect();
{
let write_txn = db.begin_write()?;
{
let mut table = write_txn
.open_table(ID_STR_MINT_QUOTES_TABLE)
.map_err(Error::from)?;
for (quote_id, quote) in migrated_mint_quotes {
match quote {
Some(quote) => {
let quote_str = serde_json::to_string(&quote)?;
table.insert(quote_id.as_str(), quote_str.as_str())?;
}
None => {
table.remove(quote_id.as_str())?;
}
}
}
}
write_txn.commit()?;
}
Ok(())
}
fn migrate_mint_proofs_02_to_03(db: Arc<Database>) -> Result<(), Error> {
let pending_proofs: Vec<([u8; 33], Option<Proof>)>;
let spent_proofs: Vec<([u8; 33], Option<Proof>)>;
{
let read_txn = db.begin_read().map_err(Error::from)?;
let table = read_txn
.open_table(PENDING_PROOFS_TABLE)
.map_err(Error::from)?;
pending_proofs = table
.iter()
.map_err(Error::from)?
.flatten()
.map(|(quote_id, mint_quote)| {
(
quote_id.value(),
serde_json::from_str(mint_quote.value()).ok(),
)
})
.collect();
}
{
let read_txn = db.begin_read().map_err(Error::from)?;
let table = read_txn
.open_table(SPENT_PROOFS_TABLE)
.map_err(Error::from)?;
spent_proofs = table
.iter()
.map_err(Error::from)?
.flatten()
.map(|(quote_id, mint_quote)| {
(
quote_id.value(),
serde_json::from_str(mint_quote.value()).ok(),
)
})
.collect();
}
let write_txn = db.begin_write().map_err(Error::from)?;
{
let mut proofs_table = write_txn.open_table(PROOFS_TABLE).map_err(Error::from)?;
let mut state_table = write_txn
.open_table(PROOFS_STATE_TABLE)
.map_err(Error::from)?;
for (y, proof) in pending_proofs {
if let Some(proof) = proof {
proofs_table.insert(y, serde_json::to_string(&proof)?.as_str())?;
state_table.insert(y, State::Pending.to_string().as_str())?;
}
}
for (y, proof) in spent_proofs {
if let Some(proof) = proof {
proofs_table.insert(y, serde_json::to_string(&proof)?.as_str())?;
state_table.insert(y, State::Spent.to_string().as_str())?;
}
}
}
write_txn.commit()?;
Ok(())
}

View File

@@ -13,7 +13,6 @@ license.workspace = true
default = ["grpc", "sqlite"]
sqlite = ["cdk-sqlite"]
sqlcipher = ["cdk-sqlite/sqlcipher"]
redb = ["dep:cdk-redb"]
grpc = ["dep:tonic", "tokio/full", "dep:prost", "dep:tonic-build"]
[dependencies]
@@ -31,7 +30,6 @@ tracing.workspace = true
# main.rs dependencies
anyhow.workspace = true
cdk-sqlite = { workspace = true, features = ["mint", "auth"], optional = true }
cdk-redb = { workspace = true, features = ["mint", "auth"], optional = true }
clap = { workspace = true }
bip39.workspace = true
home.workspace = true

View File

@@ -13,8 +13,6 @@ use bip39::rand::{thread_rng, Rng};
use bip39::Mnemonic;
use cdk_common::database::MintKeysDatabase;
use cdk_common::CurrencyUnit;
#[cfg(feature = "redb")]
use cdk_redb::MintRedbDatabase;
use cdk_signatory::{db_signatory, start_grpc_server};
#[cfg(feature = "sqlite")]
use cdk_sqlite::MintSqliteDatabase;
@@ -122,17 +120,6 @@ pub async fn cli_main() -> Result<()> {
bail!("sqlite feature not enabled");
}
}
"redb" => {
#[cfg(feature = "redb")]
{
let redb_path = work_dir.join("cdk-cli.redb");
Arc::new(MintRedbDatabase::new(&redb_path)?)
}
#[cfg(not(feature = "redb"))]
{
bail!("redb feature not enabled");
}
}
_ => bail!("Unknown DB engine"),
};

View File

@@ -74,7 +74,7 @@ if [ "$2" = "external_signatory" ]; then
fi
echo "Starting fake mintd"
cargo run --bin cdk-mintd --features "redb" &
cargo run --bin cdk-mintd &
export CDK_MINTD_PID=$!
URL="http://$CDK_ITESTS_MINT_ADDR:$CDK_ITESTS_MINT_PORT/v1/info"

View File

@@ -55,12 +55,11 @@ export CDK_MINTD_FAKE_WALLET_SUPPORTED_UNITS="sat,usd"
export CDK_MINTD_MNEMONIC="eye survey guilt napkin crystal cup whisper salt luggage manage unveil loyal"
export CDK_MINTD_FAKE_WALLET_FEE_PERCENT="0"
export CDK_MINTD_FAKE_WALLET_RESERVE_FEE_MIN="1"
export CDK_MINTD_DATABASE="redb"
export CDK_MINTD_INPUT_FEE_PPK="100"
echo "Starting fake mintd"
cargo run --bin cdk-mintd --features "redb" &
cargo run --bin cdk-mintd &
CDK_MINTD_PID=$!
# Wait for the mint to be ready