nutshell/cashu/core/migrations.py
lollerfirst c5ccf65e4d Bump SQLAlchemy to 2.0 (#626)

* `SQLALCHEMY_WARN_20=1` fixed all removed warnings.
* fix some mypy errors
* fix fetchone
* make format
* ignore annotations
* let's try like this?
* remove
* make format
* Update pyproject.toml

Co-authored-by: Pavol Rusnak <pavol@rusnak.io>

* extract _mapping in fetchone() and fetchall() + fix poetry lock
* fix
* make format
* fix integer indexing of row fields
* Update cashu/mint/crud.py

---------

Co-authored-by: Pavol Rusnak <pavol@rusnak.io>
Co-authored-by: callebtc <93376500+callebtc@users.noreply.github.com>

2024-10-05 13:32:32 +02:00
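
The "extract _mapping" item in the commit body refers to the SQLAlchemy 2.0 row-access change: result rows are tuple-like Row objects, and string-keyed access goes through the Row._mapping view rather than the removed dict-style LegacyRow interface. A minimal, self-contained sketch of that pattern (the table and values are illustrative, not taken from the nutshell codebase):

from sqlalchemy import create_engine, text

# Illustrative throwaway database; any SQLAlchemy 2.0 engine behaves the same way.
engine = create_engine("sqlite:///:memory:")
with engine.connect() as conn:
    conn.execute(text("CREATE TABLE dbversions (db TEXT, version INT)"))
    conn.execute(text("INSERT INTO dbversions VALUES ('mint', 3)"))

    rows = conn.execute(text("SELECT * FROM dbversions")).all()
    # SQLAlchemy 1.x allowed row["db"] directly; under 2.0 the dict-style view
    # lives on row._mapping, which is what migrations.py uses below.
    versions = {row._mapping["db"]: row._mapping["version"] for row in rows}
    assert versions == {"mint": 3}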


import os
import re
import time

from loguru import logger

from ..core.db import COCKROACH, POSTGRES, SQLITE, Database
from ..core.settings import settings


async def backup_database(db: Database, version: int = 0) -> str:
    # for postgres: use pg_dump
    # for sqlite: use sqlite3
    # skip backups if db_backup_path is None
    # and if version is 0 (fresh database)
    if not settings.db_backup_path or not version:
        return ""
    filename = f"backup_{db.name}_{int(time.time())}_v{version}"
    try:
        # create backup directory if it doesn't exist
        os.makedirs(os.path.join(settings.db_backup_path), exist_ok=True)
    except Exception as e:
        logger.error(
            f"Error creating backup directory: {e}. Run with BACKUP_DB_MIGRATION=False"
            " to disable backups before database migrations."
        )
        raise e
    filepath = os.path.join(settings.db_backup_path, filename)

    if db.type == SQLITE:
        filepath = f"{filepath}.sqlite3"
        logger.info(f"Creating {db.type} backup of {db.name} db to {filepath}")
        os.system(f"cp {db.path} {filepath}")
    elif db.type in {POSTGRES, COCKROACH}:
        filepath = f"{filepath}.dump"
        logger.info(f"Creating {db.type} backup of {db.name} db to {filepath}")
        os.system(f"pg_dump --dbname={db.db_location} --file={filepath}")

    return filepath


async def migrate_databases(db: Database, migrations_module):
    """Creates the necessary databases if they don't exist already; or migrates them."""

    async def set_migration_version(conn, db_name, version):
        await conn.execute(
            f"""
            INSERT INTO {db.table_with_schema('dbversions')} (db, version) VALUES (:db, :version)
            ON CONFLICT (db) DO UPDATE SET version = :version
            """,
            {"db": db_name, "version": version},
        )

    async def run_migration(db, migrations_module):
        db_name = migrations_module.__name__.split(".")[-2]
        # we first check whether any migration is needed and create a backup if so
        migration_needed = False
        for key, migrate in migrations_module.__dict__.items():
            match = matcher.match(key)
            if match:
                version = int(match.group(1))
                if version > current_versions.get(db_name, 0):
                    migration_needed = True
                    break
        if migration_needed and settings.db_backup_path:
            logger.debug(f"Creating backup of {db_name} db")
            current_version = current_versions.get(db_name, 0)
            await backup_database(db, current_version)

        # then we run the migrations
        for key, migrate in migrations_module.__dict__.items():
            match = matcher.match(key)
            if match:
                version = int(match.group(1))
                if version > current_versions.get(db_name, 0):
                    logger.debug(f"Migrating {db_name} db: {key}")
                    await migrate(db)

                    if db.schema is None:
                        await set_migration_version(db, db_name, version)
                    else:
                        async with db.connect() as conn:
                            await set_migration_version(conn, db_name, version)

    async with db.connect() as conn:  # type: ignore
        exists = None
        if conn.type == SQLITE:
            exists = await conn.fetchone(
                "SELECT * FROM sqlite_master WHERE type='table' AND"
                f" name='{db.table_with_schema('dbversions')}'"
            )
        elif conn.type in {POSTGRES, COCKROACH}:
            exists = await conn.fetchone(
                "SELECT * FROM information_schema.tables WHERE table_name ="
                f" '{db.table_with_schema('dbversions')}'"
            )
        if not exists:
            await migrations_module.m000_create_migrations_table(conn)

        result = await conn.execute(
            f"SELECT * FROM {db.table_with_schema('dbversions')}"
        )
        rows = result.all()
        current_versions = {row._mapping["db"]: row._mapping["version"] for row in rows}

    matcher = re.compile(r"^m(\d\d\d)_")
    await run_migration(db, migrations_module)
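
For orientation, a hypothetical usage sketch of migrate_databases (not part of migrations.py): a migrations module is a plain Python module whose async functions are named m000_, m001_, ... so that the ^m(\d\d\d)_ matcher above discovers them, and the database name is taken from the second-to-last component of the module's dotted path. The table layout, the m001_ function, and the Database constructor arguments below are assumptions for illustration.

# e.g. a module importable as cashu.mint.migrations -> db_name resolves to "mint"
async def m000_create_migrations_table(conn):
    # required by migrate_databases() on first run; column layout is illustrative,
    # chosen so the ON CONFLICT (db) upsert in set_migration_version() works
    await conn.execute(
        "CREATE TABLE IF NOT EXISTS dbversions (db TEXT PRIMARY KEY, version INT)"
    )

async def m001_add_example_table(db):
    # each migration receives the Database object and runs once per version bump
    async with db.connect() as conn:
        await conn.execute("CREATE TABLE IF NOT EXISTS example (id TEXT)")

# At startup (constructor arguments shown here are an assumption):
#   from cashu.mint import migrations as mint_migrations
#   db = Database("mint", settings.mint_database)
#   await migrate_databases(db, mint_migrations)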