mirror of https://github.com/aljazceru/nutshell.git
synced 2026-01-04 17:34:20 +01:00

spark test
@@ -112,6 +112,10 @@ MINT_BLINK_KEY=blink_abcdefgh
# Use with StrikeWallet for BTC, USD, and EUR
MINT_STRIKE_KEY=ABC123

# Use with SparkBackend
# MINT_SPARK_API_KEY=your_spark_api_key
# MINT_SPARK_MNEMONIC=your twelve word mnemonic phrase here

# fee to reserve in percent of the amount
LIGHTNING_FEE_PERCENT=1.0
# minimum fee to reserve

@@ -11,4 +11,5 @@ ENV PATH="/root/.local/bin:$PATH"
WORKDIR /app
COPY . .
RUN poetry config virtualenvs.create false
RUN poetry install --no-dev --no-root
RUN poetry lock --no-update
RUN poetry install --no-root

@@ -104,6 +104,14 @@ class MintBackends(MintSettings):
    mint_strike_key: str = Field(default=None)
    mint_blink_key: str = Field(default=None)

    # Spark SDK settings
    mint_spark_api_key: str = Field(default=None)
    mint_spark_mnemonic: str = Field(default=None)
    mint_spark_network: str = Field(default="mainnet")
    mint_spark_storage_dir: str = Field(default="data/spark")
    mint_spark_connection_timeout: int = Field(default=30)
    mint_spark_retry_attempts: int = Field(default=3)


class MintLimits(MintSettings):
    mint_rate_limit: bool = Field(
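The new fields follow the same pydantic Field pattern as the existing backend keys, so they should be populated from the matching environment variables (MINT_SPARK_API_KEY, MINT_SPARK_MNEMONIC, and so on). A minimal sketch of how an operator's environment would surface on the settings object, assuming the usual BaseSettings env parsing used by the other mint settings classes:

# Illustrative sketch only; assumes standard pydantic BaseSettings env handling.
import os

os.environ["MINT_SPARK_API_KEY"] = "your_spark_api_key"
os.environ["MINT_SPARK_NETWORK"] = "testnet"

from cashu.core.settings import settings  # the same settings object the backend reads

print(settings.mint_spark_network)      # "testnet" if the env override is picked up
print(settings.mint_spark_storage_dir)  # "data/spark" unless overridden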
@@ -7,6 +7,7 @@ from .fake import FakeWallet  # noqa: F401
from .lnbits import LNbitsWallet  # noqa: F401
from .lnd_grpc.lnd_grpc import LndRPCWallet  # noqa: F401
from .lndrest import LndRestWallet  # noqa: F401
from .spark import SparkBackend  # noqa: F401
from .strike import StrikeWallet  # noqa: F401

backend_settings = [
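With SparkBackend exported from cashu.lightning alongside the other wallets, it can be selected by class name like the existing backends. A rough sketch of that lookup, assuming the mint resolves the backend class from a MINT_BACKEND_* setting such as MINT_BACKEND_BOLT11_SAT=SparkBackend:

# Hedged sketch: selecting a backend class by its configured name.
import cashu.lightning as lightning

backend_name = "SparkBackend"  # would normally come from the MINT_BACKEND_* setting
backend_class = getattr(lightning, backend_name)
print(backend_class.supported_units)  # {Unit.sat, Unit.msat} for the class added below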
cashu/lightning/spark.py (new file, 561 lines)
@@ -0,0 +1,561 @@
import asyncio
import math
from typing import AsyncGenerator, Optional

from bolt11 import decode
from loguru import logger

from ..core.base import Amount, MeltQuote, Unit
from ..core.helpers import fee_reserve
from ..core.models import PostMeltQuoteRequest
from ..core.settings import settings
from .base import (
    InvoiceResponse,
    LightningBackend,
    PaymentQuoteResponse,
    PaymentResponse,
    PaymentResult,
    PaymentStatus,
    StatusResponse,
)


def _extract_invoice_checking_id(payment) -> Optional[str]:
    """Return a normalized identifier that matches the stored mint quote checking_id."""
    try:
        details = getattr(payment, "details", None)
        if details:
            logger.debug(
                "Spark extract: payment.id=%s type=%s details_type=%s has_invoice=%s has_bolt11=%s has_hash=%s",
                getattr(payment, "id", None),
                type(payment),
                type(details),
                hasattr(details, "invoice"),
                hasattr(details, "bolt11_invoice"),
                hasattr(details, "payment_hash"),
            )
            invoice = getattr(details, "invoice", None)
            if invoice:
                logger.debug("Spark extract: using details.invoice=%s", invoice)
                return invoice.lower()

            bolt11_details = getattr(details, "bolt11_invoice", None)
            if bolt11_details:
                bolt11 = getattr(bolt11_details, "bolt11", None)
                if bolt11:
                    logger.debug("Spark extract: using bolt11_details.bolt11=%s", bolt11)
                    return bolt11.lower()

            payment_hash = getattr(details, "payment_hash", None)
            if payment_hash:
                logger.debug("Spark extract: using details.payment_hash=%s", payment_hash)
                return payment_hash.lower()

        payment_hash = getattr(payment, "payment_hash", None)
        if payment_hash:
            logger.debug("Spark extract: using payment.payment_hash=%s", payment_hash)
            return payment_hash.lower()
    except Exception as exc:  # pragma: no cover - defensive logging
        logger.error(f"Failed to extract Spark invoice identifier: {exc}")

    return None


# Import Spark SDK components
try:
    from breez_sdk_spark import (
        BreezSdk,
        connect,
        ConnectRequest,
        default_config,
        EventListener,
        GetInfoRequest,
        GetPaymentRequest,
        ListPaymentsRequest,
        Network,
        PaymentStatus as SparkPaymentStatus,
        PaymentType,
        ReceivePaymentMethod,
        ReceivePaymentRequest,
        PrepareSendPaymentRequest,
        SdkEvent,
        SendPaymentRequest,
        SendPaymentOptions,
    )
except ImportError:
    # Create dummy classes for when SDK is not available
    BreezSdk = None
    EventListener = None
    SparkPaymentStatus = None
    logger.warning("Breez SDK Spark not available - SparkBackend will not function")

if EventListener is not None:
    class SparkEventListener(EventListener):
        """Event listener for Spark SDK payment notifications"""

        def __init__(self, queue: asyncio.Queue):
            super().__init__()
            self.queue = queue

        def on_event(self, event: SdkEvent) -> None:
            """Handle SDK events in a thread-safe manner"""
            try:
                # Check if this is a payment event we care about and extract the invoice id
                if hasattr(event, "payment"):
                    payment = event.payment
                    checking_id = _extract_invoice_checking_id(payment)
                    logger.debug(
                        "Spark event %s payment_type=%s status=%s payment_id=%s raw_payment=%r extracted_id=%s",
                        event.__class__.__name__,
                        getattr(payment, "payment_type", None),
                        getattr(payment, "status", None),
                        getattr(payment, "id", None),
                        payment,
                        checking_id,
                    )
                    if not checking_id:
                        logger.debug("Spark event %s ignored (no checking id)", event.__class__.__name__)
                        return

                    try:
                        loop = asyncio.get_running_loop()
                        # Thread-safe queue put with payment hash (checking_id)
                        asyncio.run_coroutine_threadsafe(
                            self.queue.put(checking_id),
                            loop,
                        )
                    except RuntimeError:
                        logger.warning("No running event loop found for Spark event")
            except Exception as e:
                logger.error(f"Error handling Spark event: {e}")
else:
    class SparkEventListener:
        """Dummy event listener when Spark SDK is not available"""

        def __init__(self, queue: asyncio.Queue):
            self.queue = queue

        def on_event(self, event) -> None:
            """Dummy event handler"""
            logger.warning("SparkEventListener called but Spark SDK not available")

class SparkBackend(LightningBackend):
    """Breez Spark SDK Lightning backend implementation"""

    supported_units = {Unit.sat, Unit.msat}
    supports_mpp = False
    supports_incoming_payment_stream = True
    supports_description = True
    unit = Unit.sat

    def __init__(self, unit: Unit = Unit.sat, **kwargs):
        if BreezSdk is None:
            raise Exception("Breez SDK not available - install breez-sdk")

        self.assert_unit_supported(unit)
        self.unit = unit
        self.sdk: Optional[BreezSdk] = None
        self.event_queue: Optional[asyncio.Queue] = None
        self.listener: Optional[SparkEventListener] = None
        self.listener_id: Optional[str] = None
        self._initialized = False
        self._initialization_lock = asyncio.Lock()
        self._connection_retry_count = 0
        self._max_retries = getattr(settings, 'mint_spark_retry_attempts', 3)
        self._retry_delay = 5.0

        # Validate required settings
        if not settings.mint_spark_api_key:
            raise Exception("MINT_SPARK_API_KEY not set")
        if not settings.mint_spark_mnemonic:
            raise Exception("MINT_SPARK_MNEMONIC not set")

    async def __aenter__(self):
        """Async context manager entry"""
        await self._ensure_initialized()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit"""
        await self.cleanup()

    async def _ensure_initialized(self) -> None:
        """Lazy initialization with proper error handling"""
        if self._initialized and self.sdk:
            return

        async with self._initialization_lock:
            if self._initialized and self.sdk:
                return

            try:
                await self._initialize_sdk_with_retry()
                self._initialized = True
            except Exception as e:
                logger.error(f"SDK initialization failed: {e}")
                raise

    async def _initialize_sdk_with_retry(self) -> None:
        """Initialize SDK with exponential backoff retry"""
        for attempt in range(self._max_retries):
            try:
                await self._initialize_sdk()
                self._connection_retry_count = 0
                return
            except Exception as e:
                self._connection_retry_count += 1
                if attempt == self._max_retries - 1:
                    raise

                delay = self._retry_delay * (2 ** attempt)
                logger.warning(
                    f"SDK init attempt {attempt + 1} failed: {e}. "
                    f"Retrying in {delay}s"
                )
                await asyncio.sleep(delay)

    async def _initialize_sdk(self) -> None:
        """Initialize the Spark SDK connection"""
        mnemonic = settings.mint_spark_mnemonic

        # Determine network
        network_str = getattr(settings, 'mint_spark_network', 'mainnet').lower()
        network = Network.MAINNET if network_str == 'mainnet' else Network.TESTNET

        config = default_config(network=network)
        config.api_key = settings.mint_spark_api_key

        storage_dir = getattr(settings, 'mint_spark_storage_dir', 'data/spark')
        connection_timeout = getattr(settings, 'mint_spark_connection_timeout', 30)

        # ConnectRequest takes mnemonic directly, not a Seed object
        self.sdk = await asyncio.wait_for(
            connect(
                request=ConnectRequest(
                    config=config,
                    mnemonic=mnemonic,
                    storage_dir=storage_dir
                )
            ),
            timeout=connection_timeout
        )

        # Set up event listener for payment notifications
        self.event_queue = asyncio.Queue()
        self.listener = SparkEventListener(self.event_queue)
        # add_event_listener is not async, it returns a string ID directly
        self.listener_id = self.sdk.add_event_listener(listener=self.listener)
        logger.debug(f"Spark SDK initialized successfully on {network_str} network")

        # Clear mnemonic from memory
        mnemonic = None
        del mnemonic

    async def cleanup(self) -> None:
        """Proper resource cleanup"""
        try:
            if hasattr(self, 'listener_id') and self.sdk:
                # remove_event_listener is not async
                self.sdk.remove_event_listener(id=self.listener_id)

            if self.sdk:
                # disconnect is not async
                self.sdk.disconnect()

        except Exception as e:
            logger.error(f"Cleanup error: {e}")
        finally:
            self.sdk = None
            self.listener = None
            self.event_queue = None
            self._initialized = False

    async def _check_connectivity(self) -> bool:
        """Quick connectivity check"""
        try:
            if not self.sdk:
                return False
            await asyncio.wait_for(
                self.sdk.get_info(request=GetInfoRequest()),
                timeout=5.0
            )
            return True
        except Exception:
            return False

    async def status(self) -> StatusResponse:
        try:
            await self._ensure_initialized()
            info = await self.sdk.get_info(request=GetInfoRequest())
            return StatusResponse(
                balance=Amount(Unit.sat, info.balance_sats),
                error_message=None
            )
        except Exception as e:
            logger.error(f"Spark status error: {e}")
            return StatusResponse(
                error_message=f"Failed to connect to Spark SDK: {e}",
                balance=Amount(self.unit, 0)
            )

    async def create_invoice(
        self,
        amount: Amount,
        memo: Optional[str] = None,
        description_hash: Optional[bytes] = None,
        unhashed_description: Optional[bytes] = None,
    ) -> InvoiceResponse:
        self.assert_unit_supported(amount.unit)

        try:
            await self._ensure_initialized()

            payment_method = ReceivePaymentMethod.BOLT11_INVOICE(
                description=memo or "",
                amount_sats=amount.to(Unit.sat).amount
            )
            request = ReceivePaymentRequest(payment_method=payment_method)
            response = await self.sdk.receive_payment(request=request)
            logger.debug(
                "Spark create_invoice amount=%s response.payment_request=%s",
                amount,
                response.payment_request,
            )

            return InvoiceResponse(
                ok=True,
                checking_id=response.payment_request.lower(),
                payment_request=response.payment_request
            )
        except Exception as e:
            logger.error(f"Spark create_invoice error for amount {amount}: {e}")
            return InvoiceResponse(ok=False, error_message=f"Invoice creation failed: {e}")

    async def pay_invoice(
        self, quote: MeltQuote, fee_limit_msat: int
    ) -> PaymentResponse:
        try:
            await self._ensure_initialized()

            # Prepare the payment
            prepare_request = PrepareSendPaymentRequest(
                payment_request=quote.request,
                amount=None  # Use invoice amount
            )
            prepare_response = await self.sdk.prepare_send_payment(request=prepare_request)

            # Send the payment
            options = SendPaymentOptions.BOLT11_INVOICE(
                prefer_spark=False,
                completion_timeout_secs=30
            )
            send_request = SendPaymentRequest(
                prepare_response=prepare_response,
                options=options
            )
            send_response = await self.sdk.send_payment(request=send_request)

            payment = send_response.payment
            logger.debug(
                "Spark pay_invoice quote=%s result_payment_id=%s status=%s type=%s raw_payment=%r",
                quote.quote,
                getattr(payment, "id", None),
                getattr(payment, "status", None),
                getattr(payment, "payment_type", None),
                payment,
            )

            # Map Spark payment status to PaymentResult
            result = self._map_payment_status(payment)

            return PaymentResponse(
                result=result,
                checking_id=payment.id,
                fee=Amount(Unit.sat, payment.fee_sats) if hasattr(payment, 'fee_sats') and payment.fee_sats else None,
                preimage=payment.preimage if hasattr(payment, 'preimage') else None
            )
        except Exception as e:
            logger.error(f"Spark pay_invoice error for quote {quote.quote}: {e}")
            return PaymentResponse(
                result=PaymentResult.FAILED,
                error_message=f"Payment failed: {e}"
            )

    async def get_invoice_status(self, checking_id: str) -> PaymentStatus:
        try:
            await self._ensure_initialized()

            # For Spark SDK, checking_id is the Lightning invoice/payment_request
            # We need to get all payments and find the one with this payment_request
            from .base import PaymentResult

            # List all recent payments to find our invoice
            list_request = ListPaymentsRequest()
            list_response = await self.sdk.list_payments(request=list_request)
            normalized_checking_id = checking_id.lower()

            for payment in list_response.payments:
                payment_checking_id = _extract_invoice_checking_id(payment)
                logger.debug(
                    "Spark get_invoice_status candidate id=%s target=%s status=%s payment_id=%s raw_payment=%r",
                    payment_checking_id,
                    normalized_checking_id,
                    getattr(payment, "status", None),
                    getattr(payment, "id", None),
                    payment,
                )
                if payment_checking_id and payment_checking_id == normalized_checking_id:
                    # Found our payment - return its status
                    result = self._map_payment_status(payment)
                    return PaymentStatus(
                        result=result,
                        fee=Amount(Unit.sat, payment.fee_sats) if hasattr(payment, 'fee_sats') and payment.fee_sats else None,
                        preimage=payment.preimage if hasattr(payment, 'preimage') else None
                    )

            # If not found in payments list, invoice is still pending
            return PaymentStatus(
                result=PaymentResult.PENDING,
                error_message=None
            )

        except Exception as e:
            logger.error(f"Get invoice status error: {e}")
            return PaymentStatus(
                result=PaymentResult.UNKNOWN,
                error_message=str(e)
            )

    async def get_payment_status(self, checking_id: str) -> PaymentStatus:
        try:
            await self._ensure_initialized()

            # Get payment by payment ID
            get_request = GetPaymentRequest(payment_id=checking_id)
            response = await self.sdk.get_payment(request=get_request)
            payment = response.payment

            # Map Spark payment status to PaymentResult
            result = self._map_payment_status(payment)

            return PaymentStatus(
                result=result,
                fee=Amount(Unit.sat, payment.fee_sats) if hasattr(payment, 'fee_sats') and payment.fee_sats else None,
                preimage=payment.preimage if hasattr(payment, 'preimage') else None
            )
        except Exception as e:
            logger.error(f"Get payment status error: {e}")
            return PaymentStatus(
                result=PaymentResult.UNKNOWN,
                error_message=str(e)
            )

    def _map_payment_status(self, payment) -> PaymentResult:
        """Map Spark SDK payment status to PaymentResult enum."""
        if not hasattr(payment, 'status'):
            return PaymentResult.UNKNOWN

        # Use official PaymentStatus enum for more reliable mapping
        try:
            if payment.status == SparkPaymentStatus.COMPLETED:
                return PaymentResult.SETTLED
            elif payment.status == SparkPaymentStatus.FAILED:
                return PaymentResult.FAILED
            elif payment.status == SparkPaymentStatus.PENDING:
                return PaymentResult.PENDING
            else:
                # Fallback to string comparison for any new status values
                status_str = str(payment.status).lower()
                if 'complete' in status_str or 'settled' in status_str or 'succeeded' in status_str:
                    return PaymentResult.SETTLED
                elif 'failed' in status_str or 'cancelled' in status_str or 'expired' in status_str:
                    return PaymentResult.FAILED
                elif 'pending' in status_str or 'processing' in status_str:
                    return PaymentResult.PENDING
                else:
                    return PaymentResult.UNKNOWN
        except (AttributeError, TypeError):
            # Fallback to string-based mapping if enum comparison fails
            status_str = str(payment.status).lower()
            if 'complete' in status_str or 'settled' in status_str or 'succeeded' in status_str:
                return PaymentResult.SETTLED
            elif 'failed' in status_str or 'cancelled' in status_str or 'expired' in status_str:
                return PaymentResult.FAILED
            elif 'pending' in status_str or 'processing' in status_str:
                return PaymentResult.PENDING
            else:
                return PaymentResult.UNKNOWN

    async def get_payment_quote(
        self, melt_quote: PostMeltQuoteRequest
    ) -> PaymentQuoteResponse:
        invoice_obj = decode(melt_quote.request)
        assert invoice_obj.amount_msat, "invoice has no amount."
        amount_msat = int(invoice_obj.amount_msat)

        # Use standard fee calculation for now
        # TODO: Use Spark SDK's fee estimation when available
        fees_msat = fee_reserve(amount_msat)
        fees = Amount(unit=Unit.msat, amount=fees_msat)
        amount = Amount(unit=Unit.msat, amount=amount_msat)

        return PaymentQuoteResponse(
            checking_id=invoice_obj.payment_hash,
            fee=fees.to(self.unit, round="up"),
            amount=amount.to(self.unit, round="up"),
        )

    async def paid_invoices_stream(self) -> AsyncGenerator[str, None]:
        """Stream of paid invoice notifications with resilience"""
        await self._ensure_initialized()

        retry_delay = settings.mint_retry_exponential_backoff_base_delay
        max_retry_delay = settings.mint_retry_exponential_backoff_max_delay

        while True:
            try:
                # Set timeout to prevent infinite blocking
                payment_id = await asyncio.wait_for(
                    self.event_queue.get(),
                    timeout=30.0
                )
                logger.debug("Spark paid_invoices_stream emitting checking_id=%s", payment_id)
                yield payment_id

                # Reset retry delay on success
                retry_delay = settings.mint_retry_exponential_backoff_base_delay

            except asyncio.TimeoutError:
                # Periodic connectivity check
                if not await self._check_connectivity():
                    logger.warning("Spark connectivity lost, attempting reconnection")
                    self._initialized = False
                    await self._ensure_initialized()
                else:
                    logger.debug("Spark paid_invoices_stream heartbeat (no events)")
                continue

            except Exception as e:
                logger.error(f"Spark payment stream error: {e}")
                await asyncio.sleep(retry_delay)

                # Exponential backoff
                retry_delay = max(
                    settings.mint_retry_exponential_backoff_base_delay,
                    min(retry_delay * 2, max_retry_delay)
                )

                # Attempt recovery
                if not self._initialized:
                    await self._ensure_initialized()

    async def health_check(self) -> bool:
        """Perform comprehensive health check"""
        try:
            await self._ensure_initialized()
            return await self._check_connectivity()
        except Exception as e:
            logger.warning(f"Spark health check failed: {e}")
            return False

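For reference, a minimal standalone exercise of the backend defined above, assuming MINT_SPARK_API_KEY and MINT_SPARK_MNEMONIC are set in the environment; it only uses the methods shown in this file (the async context manager, status, create_invoice, get_invoice_status):

# Hedged usage sketch for SparkBackend; not part of the commit itself.
import asyncio

from cashu.core.base import Amount, Unit
from cashu.lightning.spark import SparkBackend


async def main():
    # __aenter__ initializes the SDK, __aexit__ cleans it up.
    async with SparkBackend(unit=Unit.sat) as backend:
        print(await backend.status())
        invoice = await backend.create_invoice(Amount(Unit.sat, 1000), memo="test")
        if invoice.ok:
            # checking_id is the lowercased bolt11 payment request here.
            print(await backend.get_invoice_status(invoice.checking_id))


asyncio.run(main())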
@@ -35,6 +35,8 @@ for key, value in settings.dict().items():
        "mint_lnbits_key",
        "mint_blink_key",
        "mint_strike_key",
        "mint_spark_api_key",
        "mint_spark_mnemonic",
        "mint_lnd_rest_macaroon",
        "mint_lnd_rest_admin_macaroon",
        "mint_lnd_rest_invoice_macaroon",

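The two new keys are appended to the list of sensitive settings that get masked when the mint prints its configuration at startup. Conceptually, and only as a sketch of the loop this hunk extends (the names below are illustrative, not the exact surrounding code):

# Sketch of a settings-censoring loop; SENSITIVE_KEYS is a stand-in name.
from cashu.core.settings import settings

SENSITIVE_KEYS = {"mint_spark_api_key", "mint_spark_mnemonic"}

for key, value in settings.dict().items():
    if key in SENSITIVE_KEYS and value is not None:
        value = "********"
    print(f"{key}: {value}")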
@@ -1,4 +1,3 @@
version: "3"
services:
  mint:
    build:

poetry.lock (generated, 43 lines changed)
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.

[[package]]
name = "aiosqlite"
@@ -197,6 +197,45 @@ bitstring = "*"
click = "*"
coincurve = "*"

[[package]]
name = "breez-sdk-spark"
version = "0.1.9"
description = "Python language bindings for the Breez Spark SDK"
optional = false
python-versions = "*"
files = [
{file = "breez_sdk_spark-0.1.9-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7fb6c702f77f54306ebeafc5974461290e96c143df6f1a5afb974b62576b7972"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-manylinux_2_31_aarch64.whl", hash = "sha256:041b5ac3d6b2ec9c5ffec1a46ed9874376ad16bfd50bc28c2fc2a9a51a473bab"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:86a78f09cdc4dc8fbcc995073e11bdc0c06982e00b214e0f0210b15fb663e1a3"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-win32.whl", hash = "sha256:01c64566d2146b5deabb9e7291c3ebcc400bb054ef8391cc2ca50480c03015c2"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-win_amd64.whl", hash = "sha256:4c6906390454f6952bf2b923949d13eefcdccdf8da073496f0023bba41431933"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a5467d289d0245d31343f6d10744071cd092cd2b108fdf5e76e1b2f9e63c4a28"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:afadd71fb816c24ebebbc709a13c1c06091941de31c9add321316c982f06652a"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:5fd1e2e0c381bc4c142c748f4f29b43fb089b8bf909dd55107f63ff8b5fd733d"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-win32.whl", hash = "sha256:e53cb1890643f697316f4aad7c831794a798f3d6e8f288aa8906b841cc582678"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-win_amd64.whl", hash = "sha256:4d7e73759ccb1da1fca587ffe6f72d166459b34d7f6614de23352860306538ec"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5e944b035fecd6de90b9d18eb8f32b990f662880bd91fb847011f10d12e88367"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:6991e3cd3025ee153eb2a844cf06b1892c196a6079ee3db24e449e55aee06544"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:2ceb597a3bff6755525234ed07892dc82034c745fd9578629b3db85433907165"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-win32.whl", hash = "sha256:67d4e9dbc5779d8cd67e879be06006f714ec46b3d8c55ca9f08efae1e21e8889"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-win_amd64.whl", hash = "sha256:e418d47aad183cd80a5bd4629b90511e7624676db8996c0a83be42f2d528f650"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d966f858bd889b3a509d8f61d5f1e3f72fd1fab7e1b9830b523363fe357d60fa"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-manylinux_2_31_aarch64.whl", hash = "sha256:66763c74afdf2a5c4c4f5248666f87725c76c5853685a377467c3935516576b7"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:ca125db55d0a04b08956c2340b2f8091b55848e5e77c4569128badbf1eca6991"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-win32.whl", hash = "sha256:3ea9576e9fcde542ff0dda8df4e3dc20147bb7a93eecf8312b393d825ea170c9"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-win_amd64.whl", hash = "sha256:540baacc12287f7e8828e6bbbdf3e6aa9b1aaaf1cb85a0980c0a6e70933bfb3d"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1159b0e92581a30e05936e7997e19ef682b1d93057d5136847185a01145eb25c"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-manylinux_2_31_aarch64.whl", hash = "sha256:84f91b21a7698938dc7707fd31be10d7449347870ba3d07e7f3ff6260c8de754"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-manylinux_2_31_x86_64.whl", hash = "sha256:374e9f486d4e576aa3e55e1aab912c474e4d96115a5b678c1f95a08610e79968"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-win32.whl", hash = "sha256:326251dff34f0c19614746eb345bc9906785a071a3eaa7da7c54ea4076557b4c"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-win_amd64.whl", hash = "sha256:a205f11710fd1720634632a20e502d27fbdd1d400d94b5545ca74a672f509b1b"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:aa3ffaf682818608a38582b76db92f4fa2ffdf44634e2163014e594722513106"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-manylinux_2_31_aarch64.whl", hash = "sha256:7216030f2cea3d3e5fa3a4e98fda7245334de9e96ce2186bdfd11f49bb1872f7"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-manylinux_2_31_x86_64.whl", hash = "sha256:2f4512dd96bf2c7eda940db717fc961e20716dda6a38095e63322c38437a43c2"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-win32.whl", hash = "sha256:582855db91d81fe9f0f9b340e54e54d55b16d76b890467f77178b777561da9d6"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-win_amd64.whl", hash = "sha256:9f5c6110e9f378bdf446303b0f486dc221c96691710cc453a06ea5f5c12ee226"},
]

[[package]]
name = "brotli"
version = "1.1.0"
@@ -2657,4 +2696,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "5007f3202dedffb266c3bb0ba3101141a6d865e6979185a0ab6ea7d08c13213c"
content-hash = "af82f494ec3ac5f839968129579cd5986e3ff6994f0d93fcb0ddcec4374fc01f"
@@ -44,6 +44,7 @@ redis = "^5.1.1"
brotli = "^1.1.0"
zstandard = "^0.23.0"
jinja2 = "^3.1.5"
breez-sdk-spark = "^0.1.0"

[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.24.0"