DB backups before migrations (#420)
* make backups before migrations
* database tests
* postgres db backup with location string
* ignore version in pg_dump and throw warning
* install latest pg_dump
* install latest
* pg update?
* remove test from github
* skip for postgres on github actions
cashu/core/migrations.py
@@ -1,8 +1,44 @@
+import os
 import re
+import time

 from loguru import logger

 from ..core.db import COCKROACH, POSTGRES, SQLITE, Database, table_with_schema
+from ..core.settings import settings
+
+
+async def backup_database(db: Database, version: int = 0) -> str:
+    # for postgres: use pg_dump
+    # for sqlite: use sqlite3
+
+    # skip backups if db_backup_path is None
+    # and if version is 0 (fresh database)
+    if not settings.db_backup_path or not version:
+        return ""
+
+    filename = f"backup_{db.name}_{int(time.time())}_v{version}"
+    try:
+        # create backup directory if it doesn't exist
+        os.makedirs(os.path.join(settings.db_backup_path), exist_ok=True)
+    except Exception as e:
+        logger.error(
+            f"Error creating backup directory: {e}. Run with BACKUP_DB_MIGRATION=False"
+            " to disable backups before database migrations."
+        )
+        raise e
+    filepath = os.path.join(settings.db_backup_path, filename)
+
+    if db.type == SQLITE:
+        filepath = f"{filepath}.sqlite3"
+        logger.info(f"Creating {db.type} backup of {db.name} db to {filepath}")
+        os.system(f"cp {db.path} {filepath}")
+    elif db.type in {POSTGRES, COCKROACH}:
+        filepath = f"{filepath}.dump"
+        logger.info(f"Creating {db.type} backup of {db.name} db to {filepath}")
+        os.system(f"pg_dump --dbname={db.db_location} --file={filepath}")
+
+    return filepath


 async def migrate_databases(db: Database, migrations_module):
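Note that pg_dump is called without a --format flag, so it writes a plain-SQL dump that is replayed with psql rather than pg_restore, while the SQLite backup is just a file copy. A minimal restore sketch under those assumptions; restore_database is a hypothetical helper for illustration and is not part of this commit:

import os

from cashu.core.db import POSTGRES, SQLITE, Database


async def restore_database(db: Database, filepath: str) -> None:
    # hypothetical helper, not part of this commit
    if db.type == SQLITE:
        # backup_database made a plain file copy, so copy it back
        os.system(f"cp {filepath} {db.path}")
    elif db.type == POSTGRES:
        # plain-format pg_dump output is SQL: replay it with psql
        os.system(f"psql --dbname={db.db_location} --file={filepath}")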
@@ -19,6 +55,21 @@ async def migrate_databases(db: Database, migrations_module):

     async def run_migration(db, migrations_module):
         db_name = migrations_module.__name__.split(".")[-2]
+        # we first check whether any migration is needed and create a backup if so
+        migration_needed = False
+        for key, migrate in migrations_module.__dict__.items():
+            match = matcher.match(key)
+            if match:
+                version = int(match.group(1))
+                if version > current_versions.get(db_name, 0):
+                    migration_needed = True
+                    break
+        if migration_needed:
+            logger.debug(f"Creating backup of {db_name} db")
+            current_version = current_versions.get(db_name, 0)
+            await backup_database(db, current_version)
+
+        # then we run the migrations
         for key, migrate in migrations_module.__dict__.items():
             match = matcher.match(key)
             if match:
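The `matcher` regex and `current_versions` dict used above are module-level state in migrations.py that this hunk does not show. Assuming the `m<NNN>_` naming convention visible elsewhere in this diff (e.g. `m015_add_index_Y_to_proofs_used`), the version extraction works roughly like this; the pattern below is an assumption, not a line from this commit:

import re

# assumed pattern: migration functions are named m<NNN>_<description>
matcher = re.compile(r"^m(\d\d\d)_")

match = matcher.match("m015_add_index_Y_to_proofs_used")
assert match is not None
assert int(match.group(1)) == 15  # schema version 15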
@@ -45,6 +45,7 @@ class EnvSettings(CashuSettings):
|
|||||||
cashu_dir: str = Field(default=os.path.join(str(Path.home()), ".cashu"))
|
cashu_dir: str = Field(default=os.path.join(str(Path.home()), ".cashu"))
|
||||||
debug_profiling: bool = Field(default=False)
|
debug_profiling: bool = Field(default=False)
|
||||||
debug_mint_only_deprecated: bool = Field(default=False)
|
debug_mint_only_deprecated: bool = Field(default=False)
|
||||||
|
db_backup_path: str = Field(default=False)
|
||||||
|
|
||||||
|
|
||||||
class MintSettings(CashuSettings):
|
class MintSettings(CashuSettings):
|
||||||
|
|||||||
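With this settings field, backups stay disabled until a path is configured. The test below enables them by assigning `settings.db_backup_path` at runtime, and presumably the same can be done through a `DB_BACKUP_PATH` environment variable under the usual pydantic `BaseSettings` env mapping (an assumption, not shown in this diff):

from cashu.core.settings import settings

# enable backups by pointing db_backup_path at a writable directory
# (path is only an example; any writable location works)
settings.db_backup_path = "./test_data/backups/"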
cashu/mint/migrations.py
@@ -497,3 +497,13 @@ async def m014_proofs_add_Y_column(db: Database):

         # recreate the balance views
         await create_balance_views(db, conn)
+
+
+async def m015_add_index_Y_to_proofs_used(db: Database):
+    # create index on proofs_used table for Y
+    async with db.connect() as conn:
+        await conn.execute(
+            "CREATE INDEX IF NOT EXISTS"
+            " proofs_used_Y_idx ON"
+            f" {table_with_schema(db, 'proofs_used')} (Y)"
+        )
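The new index serves point lookups of spent proofs by their Y value. A sketch of that access pattern, using the `conn.execute(...)` interface seen in the tests below and assuming the result object also exposes `fetchone()`; the helper name is illustrative, and real code should use bound parameters rather than string interpolation:

from cashu.core.db import Database, table_with_schema


async def is_proof_spent(db: Database, conn, y_hex: str) -> bool:
    # illustration only: interpolating y_hex is unsafe outside an example
    res = await conn.execute(
        f"SELECT COUNT(*) FROM {table_with_schema(db, 'proofs_used')}"
        f" WHERE Y = '{y_hex}'"
    )
    row = await res.fetchone()
    return row[0] > 0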
tests/test_db.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+import datetime
+import os
+import time
+
+import pytest
+
+from cashu.core import db
+from cashu.core.db import Connection, timestamp_now
+from cashu.core.migrations import backup_database
+from cashu.core.settings import settings
+from cashu.mint.ledger import Ledger
+from tests.helpers import is_github_actions, is_postgres
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    is_github_actions and is_postgres,
+    reason=(
+        "Fails on GitHub Actions because pg_dump is not the same version as postgres"
+    ),
+)
+async def test_backup_db_migration(ledger: Ledger):
+    settings.db_backup_path = "./test_data/backups/"
+    filepath = await backup_database(ledger.db, 999)
+    assert os.path.exists(filepath)
+
+
+@pytest.mark.asyncio
+async def test_timestamp_now(ledger: Ledger):
+    ts = timestamp_now(ledger.db)
+    if ledger.db.type == db.SQLITE:
+        assert isinstance(ts, str)
+        assert int(ts) <= time.time()
+    elif ledger.db.type in {db.POSTGRES, db.COCKROACH}:
+        assert isinstance(ts, str)
+        datetime.datetime.strptime(ts, "%Y-%m-%d %H:%M:%S")
+
+
+@pytest.mark.asyncio
+async def test_get_connection(ledger: Ledger):
+    async with ledger.db.connect() as conn:
+        assert isinstance(conn, Connection)
+
+
+@pytest.mark.asyncio
+async def test_db_tables(ledger: Ledger):
+    async with ledger.db.connect() as conn:
+        if ledger.db.type == db.SQLITE:
+            tables_res = await conn.execute(
+                "SELECT name FROM sqlite_master WHERE type='table';"
+            )
+        elif ledger.db.type in {db.POSTGRES, db.COCKROACH}:
+            tables_res = await conn.execute(
+                "SELECT table_name FROM information_schema.tables WHERE table_schema ="
+                " 'public';"
+            )
+        tables = [t[0] for t in await tables_res.fetchall()]
+        tables_expected = [
+            "dbversions",
+            "keysets",
+            "proofs_used",
+            "proofs_pending",
+            "melt_quotes",
+            "mint_quotes",
+            "mint_pubkeys",
+            "promises",
+        ]
+        for table in tables_expected:
+            assert table in tables
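The `is_github_actions` and `is_postgres` flags come from `tests/helpers`, which this diff does not include. Plausible definitions, stated as assumptions only: GitHub Actions sets `GITHUB_ACTIONS=true` in its runners, and the backend can be inferred from the mint's database setting:

import os

from cashu.core.settings import settings

# assumed definitions for illustration; the real ones live in tests/helpers.py
is_github_actions = os.getenv("GITHUB_ACTIONS") == "true"
is_postgres = str(settings.mint_database).startswith("postgres")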