This commit is contained in:
2025-11-22 17:48:36 +01:00
parent 077e3690f3
commit afb2e745a6
5 changed files with 104 additions and 145 deletions

View File

@@ -11,6 +11,4 @@ ENV PATH="/root/.local/bin:$PATH"
WORKDIR /app
COPY . .
RUN poetry config virtualenvs.create false
RUN poetry lock --no-update
# Install all dependencies including breez-sdk-spark (now installs 0.3.4 automatically)
RUN poetry install --no-root --all-extras
RUN poetry install --no-dev --no-root

View File

@@ -5,9 +5,9 @@ from typing import List, Optional
from environs import Env # type: ignore
from pydantic import Field
try:
from pydantic_settings import BaseSettings
from pydantic import ConfigDict
except ImportError:
from pydantic import BaseSettings, Extra
@@ -34,21 +34,11 @@ class CashuSettings(BaseSettings):
lightning_reserve_fee_min: int = Field(default=2000)
max_order: int = Field(default=64)
try:
# Pydantic v2 style
model_config = ConfigDict(
env_file=find_env_file(),
env_file_encoding="utf-8",
case_sensitive=False,
extra="ignore"
)
except NameError:
# Pydantic v1 style fallback
class Config(BaseSettings.Config):
env_file = find_env_file()
env_file_encoding = "utf-8"
case_sensitive = False
extra = Extra.ignore
class Config(BaseSettings.Config):
env_file = find_env_file()
env_file_encoding = "utf-8"
case_sensitive = False
extra = Extra.ignore
class EnvSettings(CashuSettings):

View File

@@ -214,6 +214,16 @@ except ImportError as e:
)
# Payment status mapping: Breez Spark SDK PaymentStatus -> mint PaymentResult.
# Defaults to an empty dict so lookups fall back to PaymentResult.UNKNOWN
# when the SDK failed to import (SparkPaymentStatus is None in that case —
# see the ImportError handler in the enclosing hunk).
SPARK_PAYMENT_RESULT_MAP = {}
if SparkPaymentStatus is not None:
# NOTE(review): only the three statuses visible here are mapped; any new
# SDK status value will resolve to UNKNOWN via dict.get — confirm callers
# handle that (the string-based fallback further below appears to).
SPARK_PAYMENT_RESULT_MAP = {
SparkPaymentStatus.COMPLETED: PaymentResult.SETTLED,
SparkPaymentStatus.FAILED: PaymentResult.FAILED,
SparkPaymentStatus.PENDING: PaymentResult.PENDING,
}
def _get_payment_amount_sats(payment) -> Optional[int]:
"""Return the payment amount in satoshis if available."""
amount = getattr(payment, "amount", None)
@@ -390,7 +400,10 @@ else:
class SparkBackend(LightningBackend):
"""Breez Spark SDK Lightning backend implementation"""
"""Breez Spark SDK Lightning backend implementation
https://docs.breez.technology/guide/spark_overview.html
"""
supported_units = {Unit.sat, Unit.msat}
supports_mpp = False
@@ -416,10 +429,8 @@ class SparkBackend(LightningBackend):
self._retry_delay = 5.0
# Validate required settings
if not settings.mint_spark_api_key:
raise Exception("MINT_SPARK_API_KEY not set")
if not settings.mint_spark_mnemonic:
raise Exception("MINT_SPARK_MNEMONIC not set")
assert settings.mint_spark_api_key, "MINT_SPARK_API_KEY not set"
assert settings.mint_spark_mnemonic, "MINT_SPARK_MNEMONIC not set"
async def __aenter__(self):
"""Async context manager entry"""
@@ -573,12 +584,19 @@ class SparkBackend(LightningBackend):
try:
await self._ensure_initialized()
assert self.sdk is not None
info = await self.sdk.get_info(request=GetInfoRequest(ensure_synced=None))
# Wait for full sync to get accurate balance
info = await self.sdk.get_info(request=GetInfoRequest(ensure_synced=True))
balance_sats = info.balance_sats
return StatusResponse(
balance=Amount(Unit.sat, info.balance_sats), error_message=None
balance=Amount(Unit.sat, balance_sats), error_message=None
)
except Exception as e:
logger.error(f"Spark status error: {e}")
import traceback
logger.error(f"Spark status traceback: {traceback.format_exc()}")
return StatusResponse(
error_message=f"Failed to connect to Spark SDK: {e}",
balance=Amount(self.unit, 0),
@@ -830,34 +848,10 @@ class SparkBackend(LightningBackend):
if not hasattr(payment, "status"):
return PaymentResult.UNKNOWN
# Use official PaymentStatus enum for more reliable mapping
# Use status mapping constant
try:
if payment.status == SparkPaymentStatus.COMPLETED:
return PaymentResult.SETTLED
elif payment.status == SparkPaymentStatus.FAILED:
return PaymentResult.FAILED
elif payment.status == SparkPaymentStatus.PENDING:
return PaymentResult.PENDING
else:
# Fallback to string comparison for any new status values
status_str = str(payment.status).lower()
if (
"complete" in status_str
or "settled" in status_str
or "succeeded" in status_str
):
return PaymentResult.SETTLED
elif (
"failed" in status_str
or "cancelled" in status_str
or "expired" in status_str
):
return PaymentResult.FAILED
elif "pending" in status_str or "processing" in status_str:
return PaymentResult.PENDING
else:
return PaymentResult.UNKNOWN
except (AttributeError, TypeError):
return SPARK_PAYMENT_RESULT_MAP.get(payment.status, PaymentResult.UNKNOWN)
except (AttributeError, TypeError, KeyError):
# Fallback to string-based mapping if enum comparison fails
status_str = str(payment.status).lower()
if (
@@ -897,7 +891,12 @@ class SparkBackend(LightningBackend):
)
async def paid_invoices_stream(self) -> AsyncGenerator[str, None]:
"""Stream of paid invoice notifications with resilience"""
"""Stream of paid invoice notifications with built-in verification
Includes payment verification within the stream to ensure only settled
payments are yielded. This prevents false positives from reaching the
mint's payment processing logic.
"""
await self._ensure_initialized()
retry_delay = settings.mint_retry_exponential_backoff_base_delay
@@ -910,9 +909,26 @@ class SparkBackend(LightningBackend):
payment_id = await asyncio.wait_for(
self.event_queue.get(), timeout=30.0
)
logger.debug(
f"Spark paid_invoices_stream emitting checking_id={payment_id}"
)
# Verify payment is actually settled before yielding
try:
status = await self.get_invoice_status(payment_id)
if not status.settled:
logger.debug(
f"Spark: payment event not settled, skipping: {payment_id[:20]}..."
)
continue
logger.debug(
f"Spark: verified settled payment: {payment_id[:20]}..."
)
except Exception as verify_exc:
logger.error(
f"Spark: payment verification failed for {payment_id[:20]}...: {verify_exc}"
)
continue
# Only yield verified, settled payments
yield payment_id
# Reset retry delay on success

View File

@@ -3,7 +3,7 @@ from typing import List
from loguru import logger
from ..core.base import MintQuoteState, Method, Unit
from ..core.base import MintQuoteState
from ..core.settings import settings
from ..lightning.base import LightningBackend
from .protocols import SupportsBackends, SupportsDb, SupportsEvents
@@ -24,7 +24,7 @@ class LedgerTasks(SupportsDb, SupportsBackends, SupportsEvents):
if backend.supports_incoming_payment_stream:
retry_delay = settings.mint_retry_exponential_backoff_base_delay
max_retry_delay = settings.mint_retry_exponential_backoff_max_delay
while True:
try:
# Reset retry delay on successful connection to backend stream
@@ -33,9 +33,11 @@ class LedgerTasks(SupportsDb, SupportsBackends, SupportsEvents):
await self.invoice_callback_dispatcher(checking_id)
except Exception as e:
logger.error(f"Error in invoice listener: {e}")
logger.info(f"Restarting invoice listener in {retry_delay} seconds...")
logger.info(
f"Restarting invoice listener in {retry_delay} seconds..."
)
await asyncio.sleep(retry_delay)
# Exponential backoff
retry_delay = min(retry_delay * 2, max_retry_delay)
@@ -58,14 +60,6 @@ class LedgerTasks(SupportsDb, SupportsBackends, SupportsEvents):
)
# set the quote as paid
if quote.unpaid:
confirmed = await self._confirm_invoice_paid_with_backend(quote)
if not confirmed:
logger.debug(
"Invoice callback ignored for %s; backend still reports %s",
quote.quote,
"pending" if quote.unpaid else quote.state.value,
)
return
quote.state = MintQuoteState.paid
await self.crud.update_mint_quote(quote=quote, db=self.db, conn=conn)
logger.trace(
@@ -73,47 +67,3 @@ class LedgerTasks(SupportsDb, SupportsBackends, SupportsEvents):
)
await self.events.submit(quote)
async def _confirm_invoice_paid_with_backend(self, quote) -> bool:
"""Ensure the Lightning backend agrees an invoice is settled before the
quote's state is changed in the database.

Returns True only when a registered backend for the quote's method/unit
reports the payment as settled; returns False (and logs) on any lookup
failure, missing checking_id, or a not-yet-settled status.
"""
# Resolve the payment method enum; an unknown method means we cannot
# pick a backend, so refuse to confirm.
try:
method = Method[quote.method]
except KeyError:
logger.error(f"Unknown payment method on quote {quote.quote}: {quote.method}")
return False
# Same for the currency unit.
try:
unit = Unit[quote.unit]
except KeyError:
logger.error(f"Unknown unit on quote {quote.quote}: {quote.unit}")
return False
# Without a checking_id there is nothing to look up on the backend.
if not quote.checking_id:
logger.error(f"Quote {quote.quote} missing checking_id; cannot verify payment")
return False
# self.backends is a nested mapping: method -> unit -> backend instance.
method_backends = self.backends.get(method)
if not method_backends:
logger.error(f"No backend registered for method {method}")
return False
backend = method_backends.get(unit)
if not backend:
logger.error(f"No backend registered for method {method} unit {unit}")
return False
# Ask the backend for the live invoice status; treat any error as
# "not confirmed" rather than propagating (best-effort verification).
try:
status = await backend.get_invoice_status(quote.checking_id)
except Exception as exc:
logger.error(f"Backend verification failed for quote {quote.quote}: {exc}")
return False
# Not settled yet (e.g. still pending): defer the DB state change; the
# caller is expected to retry on a later payment event.
if not status.settled:
logger.debug(
"Backend reported %s for quote %s; deferring state change",
status.result,
quote.quote,
)
return False
return True

69
poetry.lock generated
View File

@@ -199,41 +199,46 @@ coincurve = "*"
[[package]]
name = "breez-sdk-spark"
version = "0.3.4"
version = "0.4.2"
description = "Python language bindings for the Breez Spark SDK"
optional = false
python-versions = "*"
files = [
{file = "breez_sdk_spark-0.3.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9cbaa5324af163b763e3dfccc1f6a449ffbd74666f56a5d8a6d648ead3d997d5"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-manylinux_2_31_aarch64.whl", hash = "sha256:6b2d7cf0b721bf2ba24e2b1e665cdffc8714ab85815f1d2f75cb1a839c23a03d"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:3c302a7fcd6db5c01de52c1e26600e86b5ddda59a233f64d2fe31f0033b7155e"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-win32.whl", hash = "sha256:4ea8fbe1b1f16c5e4d1ece199efc6bdf8c10827782994b8a1e51684023aea128"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:f641c0f033fd6c5b61c2af7440812404b0f4a7734aa831a170a4cbacd678e18a"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ee7b10b76a83bcc387e79470962e0f3a7ff81ab5db9f21953db056657fec90f7"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:c9f2d77dde456373af739ac0aadae1c38fa506cc53ede857fff33eae56d709e1"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:4d3e941e5996112f8116b2c1f3727f009b56a9d582472141cd97dee876a79a41"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-win32.whl", hash = "sha256:de2d2bfc6c7fee6086d75b1e37744da2467c0e62bc8e119b9c76664728cd4ead"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:0789d0c7d0852afa5ae1b08bbfc44ad841c92faf149356dda343577b4ca0c949"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:cb503060516c4ad96e3fbda5bf693dfede04eb0d1a8ab685453c8161fd808103"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:ae57372c1f559ffb5f12eaa9627538d408b25c695473900855e7290ef391ada4"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:5a4d13a9ebb402f1cb2ec10556e4150db6cf2a73c3c91d036d6f38860cca6895"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-win32.whl", hash = "sha256:4a3e4eb1404fc915d5bd6d515bd863001e791aed9415710778148fed31a6b43d"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:58cc6cd7551c70ac062c29e02a0f3b557b6b82cef4abe71f87094b1dafb72495"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d1f8f8a8ffe4392d0833487b089280f846b47c7320a8497a0fbd589b1af0dae9"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-manylinux_2_31_aarch64.whl", hash = "sha256:c35d365a90da2f70aac7831987431e49a1f8f4ac0a5f92ab16bf868ec45c7792"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:2fe652e45ed761a4faee9e6a789ceb6fde09dc62a8f2662dfb72b387b936260c"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-win32.whl", hash = "sha256:0fe6fa8f156bf8f052e7c40f7cc6a063244793a6bbe24b560f84e0191e2b5914"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:cec60322d21d4e4300871020d0b4e8d3ba69a98846c588422696310ab48e5727"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:c93add503f09c0ca99c1b17598e90bb6b9181e98e77485b926851b75dc16423a"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-manylinux_2_31_aarch64.whl", hash = "sha256:9de93152ca2be527f73df215ab278884b0957578cb3670f15111a358cd55d0be"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-manylinux_2_31_x86_64.whl", hash = "sha256:064cdaee5f92ee8a12c3b7d90633666e34b5eee129e722147354b45409f18506"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-win32.whl", hash = "sha256:0283815011edddfd18c82ff85e6b1e76092f9fd3c346cacbc700a67709d35a6f"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:e85fccbd6844ed3efa585d137a5b8166d12271f2e6e95530622a12a526776a4f"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:97fcf7372dc10051abd0be40548472fb7332c19263d215c17c95fb7ec6c715bb"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-manylinux_2_31_aarch64.whl", hash = "sha256:08a133b43ff123b7e231ff849930f884254ff608dc5407c12276d09ebe645e8b"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-manylinux_2_31_x86_64.whl", hash = "sha256:69753fe329cc57e84883d33e2bddcb0bf816f849b724c0ddb86904f4c8d49b55"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-win32.whl", hash = "sha256:d68c8c22a837a4ca81375ee4f1d7f72e0808a120ca933c8818308fd185f21665"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:82fb1c8c147649775259e12e03a63e3ce89bbd000dd7690a0957977807a739e3"},
{file = "breez_sdk_spark-0.4.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:546675c389bc4f5d0262e53905074ea83c5ce21295321b0da29518f9e0bd4eb1"},
{file = "breez_sdk_spark-0.4.2-cp310-cp310-manylinux_2_31_aarch64.whl", hash = "sha256:faf7ea92dfc3620f9e77d4b53b3bb2e5ad1f4fb0ecb40d4c83ad4f9b05c214cf"},
{file = "breez_sdk_spark-0.4.2-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:5f02ac027edb973ada9e1912725d02a8dafcb3518c0c7f9707c1388be2db908a"},
{file = "breez_sdk_spark-0.4.2-cp310-cp310-win32.whl", hash = "sha256:4175d83c7f6506a57479dc0fc58d20ed397e3ac5a8687780aae8bb2ed413f367"},
{file = "breez_sdk_spark-0.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:845cdf07b746d4702bd477bd574cc408b76baecf8104e1f160cd299404bff910"},
{file = "breez_sdk_spark-0.4.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6e9ef7192bbc9f4c93444b671a5a8e3af1686cec6c464b1122a23cc18e9ca708"},
{file = "breez_sdk_spark-0.4.2-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:1e7f77b100704ff1d4a7648e393e613443da4be7adc484397a787b609d1a72f7"},
{file = "breez_sdk_spark-0.4.2-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:9bcaa5453613b9ff51af3a1cfb5f1b23260f54a8703f61c9dbcf47bdbfd82b93"},
{file = "breez_sdk_spark-0.4.2-cp311-cp311-win32.whl", hash = "sha256:9676e73d08d7ae59a73e28e6b0d92bcf6406d8c94cff7136092fa97e26c01d82"},
{file = "breez_sdk_spark-0.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e0bf9dfcc0587298405772ef752deac2e6f4d6985e1f26a643b86fd41a8d3dd4"},
{file = "breez_sdk_spark-0.4.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5a699d536b29ec9fea14bde8c2fb0e11611fb85a10848c0a83121a3ff24d97f3"},
{file = "breez_sdk_spark-0.4.2-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:0f054dfb9c528d7c688177123d8837294e385295b2be5ef7909c76ec7c3d5012"},
{file = "breez_sdk_spark-0.4.2-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:6ecbc6e02baef53f306b0783b7f98fa0b3e71ebe74b97770300820412af14fa3"},
{file = "breez_sdk_spark-0.4.2-cp312-cp312-win32.whl", hash = "sha256:2eb6bd85be892627f83b16f235d92c47c8ba1f4742fb9c19816caecc60fc180f"},
{file = "breez_sdk_spark-0.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc06c25662ecaddd876641417a109e307f503dbb1cb24e7c77344bf5cedc3182"},
{file = "breez_sdk_spark-0.4.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5cd95e53c04f3172e0716f5fd2808f3bdf7cd73ff3bc898480c6e04836c73282"},
{file = "breez_sdk_spark-0.4.2-cp313-cp313-manylinux_2_31_aarch64.whl", hash = "sha256:32232e5bb5b1601dcba2f76511b80cec7c2dae184c3427d9a01b91b995c38bc5"},
{file = "breez_sdk_spark-0.4.2-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:3f422019fbd23b935bfb38cf2a644b6c0beb9eb2407cf817a96c536cbcdb53ab"},
{file = "breez_sdk_spark-0.4.2-cp313-cp313-win32.whl", hash = "sha256:ce2f107be9c61145a77eb843df27097af435bfa44ce2c90db0f6a3afc27eae50"},
{file = "breez_sdk_spark-0.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:e9b81df1f0383b5540c6669fa076e0d1fc052931f5f9b862e2c8fce6b51947ff"},
{file = "breez_sdk_spark-0.4.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:0cf5023484efdbfb66378a9ac170790db3cfff0fea6b79f4bf44db650e874d2c"},
{file = "breez_sdk_spark-0.4.2-cp314-cp314-manylinux_2_31_aarch64.whl", hash = "sha256:36617a5698144cb8203182a9df97362f2ebe00a724ea1322df612a0c590cf6db"},
{file = "breez_sdk_spark-0.4.2-cp314-cp314-manylinux_2_31_x86_64.whl", hash = "sha256:940683ee2f430dfb274c851cc230e76c0cfcc827cea2120a6a55077435e54206"},
{file = "breez_sdk_spark-0.4.2-cp314-cp314-win32.whl", hash = "sha256:b87cf95b5365e182f7e7f9e20d7676099935fd73ebd5301320db7a7474c830a4"},
{file = "breez_sdk_spark-0.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:138ac415c9a2ce6a795e5d2b7ab0155c0d550caeb0cd46d9451a0e1089a0fd5a"},
{file = "breez_sdk_spark-0.4.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:efc1db5a2d2e249599010d777357e1f8ca85edc3ff2234df5fe73ae0d60d1a0b"},
{file = "breez_sdk_spark-0.4.2-cp38-cp38-manylinux_2_31_aarch64.whl", hash = "sha256:97715b2b8b6b608cfcc324413e3e5e11497449edf558af2d94fe9baee74feb67"},
{file = "breez_sdk_spark-0.4.2-cp38-cp38-manylinux_2_31_x86_64.whl", hash = "sha256:91efb146410222959c5ce8182921ba432288ce2961646bf921da5612fa8f959b"},
{file = "breez_sdk_spark-0.4.2-cp38-cp38-win32.whl", hash = "sha256:90c9d50adb3e942ebed41bd982ffe58c54011953d558de200b7b66dddf5b36ea"},
{file = "breez_sdk_spark-0.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:865ef7181431a311c48c87e96267ea0c99bcc839431e522c9905b689a786a8f9"},
{file = "breez_sdk_spark-0.4.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:5ae040d1717f0e9c8c8d219d5e651b1d20548506ef98f4d37f27fa356cacc1d6"},
{file = "breez_sdk_spark-0.4.2-cp39-cp39-manylinux_2_31_aarch64.whl", hash = "sha256:b9f1ccbec9e119c2db1375e0f67a6463de90818e0e09139a4c24cb70bb232933"},
{file = "breez_sdk_spark-0.4.2-cp39-cp39-manylinux_2_31_x86_64.whl", hash = "sha256:3a339eba9b932974e3690a7e218eadf11b1d71073e7dc93062f221902fab02d3"},
{file = "breez_sdk_spark-0.4.2-cp39-cp39-win32.whl", hash = "sha256:e73e55965028c7fe5460bf26ce63ca1f499deb28fe58766518610200fd0ccb68"},
{file = "breez_sdk_spark-0.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:dfe3182ddd07fcb1c964922d4284efa03562a2d5dbd397689789fc00e7369d2b"},
]
[[package]]
@@ -2696,4 +2701,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "69b57bd10ce6b62ec91378f866d77f641a4e99aa5cfc79ac2a4910e2bc1880c8"
content-hash = "ba313b7a4686a95efdca1a7743d365245246bba613d1d0ec13f986c45a1edc08"