not working

2025-11-11 20:36:42 +01:00
parent 3957b790f1
commit 5c892f6083
5 changed files with 312 additions and 134 deletions


@@ -12,4 +12,5 @@ WORKDIR /app
COPY . .
RUN poetry config virtualenvs.create false
RUN poetry lock --no-update
RUN poetry install --no-root
# Install all dependencies including breez-sdk-spark (now installs 0.3.4 automatically)
RUN poetry install --no-root --all-extras


@@ -4,7 +4,12 @@ from pathlib import Path
from typing import List, Optional
from environs import Env # type: ignore
from pydantic import BaseSettings, Extra, Field
from pydantic import Field
try:
from pydantic_settings import BaseSettings
from pydantic import ConfigDict
except ImportError:
from pydantic import BaseSettings, Extra
env = Env()
@@ -29,14 +34,21 @@ class CashuSettings(BaseSettings):
lightning_reserve_fee_min: int = Field(default=2000)
max_order: int = Field(default=64)
class Config(BaseSettings.Config):
env_file = find_env_file()
env_file_encoding = "utf-8"
case_sensitive = False
extra = Extra.ignore
# def __init__(self, env_file=None):
# self.env_file = env_file or self.env_file
try:
# Pydantic v2 style
model_config = ConfigDict(
env_file=find_env_file(),
env_file_encoding="utf-8",
case_sensitive=False,
extra="ignore"
)
except NameError:
# Pydantic v1 style fallback
class Config(BaseSettings.Config):
env_file = find_env_file()
env_file_encoding = "utf-8"
case_sensitive = False
extra = Extra.ignore
class EnvSettings(CashuSettings):
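
The hunk above makes the settings class load on both pydantic v1 and v2. A minimal standalone sketch of the same compatibility idea, assuming pydantic-settings (the v2 package) exposes SettingsConfigDict, which covers env-file options; the class and field names below are illustrative, not taken from this repo, and the hunk itself uses pydantic.ConfigDict instead:

try:
    # pydantic v2: BaseSettings moved to the separate pydantic-settings package
    from pydantic_settings import BaseSettings, SettingsConfigDict
    PYDANTIC_V2 = True
except ImportError:
    # pydantic v1: BaseSettings and Extra still live in pydantic itself
    from pydantic import BaseSettings, Extra
    PYDANTIC_V2 = False

class DemoSettings(BaseSettings):
    debug: bool = False  # illustrative field

    if PYDANTIC_V2:
        model_config = SettingsConfigDict(
            env_file=".env",
            env_file_encoding="utf-8",
            case_sensitive=False,
            extra="ignore",
        )
    else:
        class Config:
            env_file = ".env"
            env_file_encoding = "utf-8"
            case_sensitive = False
            extra = Extra.ignore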


@@ -1,4 +1,5 @@
import asyncio
import inspect
import math
from typing import AsyncGenerator, Optional
@@ -21,39 +22,57 @@ from .base import (
def _extract_invoice_checking_id(payment) -> Optional[str]:
"""Return a normalized identifier that matches the stored mint quote checking_id."""
"""Return a normalized identifier (payment_hash) that matches the stored mint quote checking_id."""
try:
details = getattr(payment, "details", None)
if details:
logger.debug(
"Spark extract: payment.id=%s type=%s details_type=%s has_invoice=%s has_bolt11=%s has_hash=%s",
getattr(payment, "id", None),
type(payment),
type(details),
hasattr(details, "invoice"),
hasattr(details, "bolt11_invoice"),
hasattr(details, "payment_hash"),
)
# Only log details for debugging when needed
# logger.debug(
# f"Spark extract: payment.id={getattr(payment, 'id', None)} type={type(payment)} "
# f"details_type={type(details)} has_invoice={hasattr(details, 'invoice')} "
# f"has_bolt11={hasattr(details, 'bolt11_invoice')} has_hash={hasattr(details, 'payment_hash')}"
# )
# First priority: payment_hash (most reliable for matching)
payment_hash = getattr(details, "payment_hash", None)
if payment_hash:
# logger.debug(f"Spark extract: using details.payment_hash={payment_hash}")
return payment_hash.lower()
# Second priority: extract hash from invoice if available
invoice = getattr(details, "invoice", None)
if invoice:
logger.debug("Spark extract: using details.invoice=%s", invoice)
try:
from bolt11 import decode as bolt11_decode
invoice_obj = bolt11_decode(invoice)
if invoice_obj.payment_hash:
# logger.debug(f"Spark extract: extracted payment_hash from invoice={invoice_obj.payment_hash}")
return invoice_obj.payment_hash.lower()
except Exception:
pass
# Fall back to the full invoice if the hash can't be extracted
# logger.debug(f"Spark extract: using details.invoice={invoice[:50]}...")
return invoice.lower()
bolt11_details = getattr(details, "bolt11_invoice", None)
if bolt11_details:
bolt11 = getattr(bolt11_details, "bolt11", None)
if bolt11:
logger.debug("Spark extract: using bolt11_details.bolt11=%s", bolt11)
try:
from bolt11 import decode as bolt11_decode
invoice_obj = bolt11_decode(bolt11)
if invoice_obj.payment_hash:
# logger.debug(f"Spark extract: extracted payment_hash from bolt11={invoice_obj.payment_hash}")
return invoice_obj.payment_hash.lower()
except Exception:
pass
# logger.debug(f"Spark extract: using bolt11_details.bolt11={bolt11[:50]}...")
return bolt11.lower()
payment_hash = getattr(details, "payment_hash", None)
if payment_hash:
logger.debug("Spark extract: using details.payment_hash=%s", payment_hash)
return payment_hash.lower()
# Fallback: check payment-level payment_hash
payment_hash = getattr(payment, "payment_hash", None)
if payment_hash:
logger.debug("Spark extract: using payment.payment_hash=%s", payment_hash)
# logger.debug(f"Spark extract: using payment.payment_hash={payment_hash}")
return payment_hash.lower()
except Exception as exc: # pragma: no cover - defensive logging
logger.error(f"Failed to extract Spark invoice identifier: {exc}")
@@ -61,6 +80,46 @@ def _extract_invoice_checking_id(payment) -> Optional[str]:
return None
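
For context on the extraction helper above: the payment hash is what gets matched against the stored mint quote checking_id, and it can be recovered from the invoice string itself. A minimal sketch using the same bolt11 decoder the function imports (the helper name is illustrative, and the invoice argument is a placeholder):

from typing import Optional
from bolt11 import decode as bolt11_decode

def payment_hash_from_invoice(invoice: str) -> Optional[str]:
    """Return the lowercase payment hash of a BOLT11 invoice, or None if it cannot be decoded."""
    try:
        return bolt11_decode(invoice).payment_hash.lower()
    except Exception:
        # Malformed or unsupported invoice string
        return None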
def _get_payment_fee_sats(payment) -> Optional[int]:
"""Return the payment fee in satoshis if available."""
fee = None
for attr in ("fee_sats", "fees", "fee"):
if hasattr(payment, attr):
fee = getattr(payment, attr)
if fee is not None:
break
if fee is None:
details = getattr(payment, "details", None)
if details is not None and hasattr(details, "fees"):
fee = getattr(details, "fees")
if fee is None:
return None
try:
return int(fee)
except (TypeError, ValueError):
try:
return int(str(fee))
except (TypeError, ValueError):
return None
def _get_payment_preimage(payment) -> Optional[str]:
"""Return the payment preimage if exposed by the SDK."""
preimage = getattr(payment, "preimage", None)
if preimage:
return preimage
details = getattr(payment, "details", None)
if details and hasattr(details, "preimage"):
return getattr(details, "preimage") or None
return None
# Import Spark SDK components
try:
from breez_sdk_spark import (
@@ -81,78 +140,128 @@ try:
SdkEvent,
SendPaymentRequest,
SendPaymentOptions,
Seed,
)
except ImportError:
# Event loop fix will be imported but not applied yet
set_sdk_event_loop = None
try:
from .spark_event_loop_fix import set_sdk_event_loop as _set_sdk_event_loop
set_sdk_event_loop = _set_sdk_event_loop
except ImportError:
pass
# uniffi_set_event_loop is not available in newer versions
spark_uniffi_set_event_loop = None
common_uniffi_set_event_loop = None
except ImportError as e:
# Create dummy classes for when SDK is not available
BreezSdk = None
EventListener = None
SparkPaymentStatus = None
logger.warning("Breez SDK Spark not available - SparkBackend will not function")
spark_uniffi_set_event_loop = None
common_uniffi_set_event_loop = None
logger.warning(f"Breez SDK Spark not available - SparkBackend will not function: {e}")
if EventListener is not None:
class SparkEventListener(EventListener):
"""Event listener for Spark SDK payment notifications"""
def __init__(self, queue: asyncio.Queue):
def __init__(self, queue: asyncio.Queue, loop: asyncio.AbstractEventLoop):
super().__init__()
self.queue = queue
self.loop = loop
def on_event(self, event: SdkEvent) -> None:
"""Handle SDK events in a thread-safe manner"""
"""Handle SDK events in a thread-safe manner with robust error handling"""
try:
# Debug log ALL events to understand what types of events we get
logger.info(f"Spark SDK event received: {event.__class__.__name__}, hasPayment={hasattr(event, 'payment')}, event={event}")
# Check if this is a payment event we care about and extract the invoice id
if hasattr(event, "payment"):
payment = event.payment
status = getattr(payment, "status", None)
if status != SparkPaymentStatus.COMPLETED:
logger.debug(
"Spark event %s ignored (status %s)",
event.__class__.__name__,
status,
)
return
payment_type = getattr(payment, "payment_type", None)
if payment_type != PaymentType.RECEIVE:
logger.debug(
"Spark event %s ignored (payment type %s)",
event.__class__.__name__,
payment_type,
# Debug log all payment events to understand what we're getting
logger.info(f"Spark payment event: status={status}, type={payment_type}, payment={payment}")
# Less restrictive filtering - allow various statuses that might indicate completed payments
if status and hasattr(SparkPaymentStatus, 'COMPLETED') and status != SparkPaymentStatus.COMPLETED:
# Check if it's a different completion status
if not (hasattr(SparkPaymentStatus, 'SETTLED') and status == SparkPaymentStatus.SETTLED):
logger.debug(
f"Spark event {event.__class__.__name__} ignored (status {status})"
)
return
# Less restrictive payment type filtering - log but don't reject non-RECEIVE types yet
if payment_type and hasattr(PaymentType, 'RECEIVE') and payment_type != PaymentType.RECEIVE:
logger.info(
f"Spark event {event.__class__.__name__} has non-RECEIVE type ({payment_type}) - processing anyway"
)
return
checking_id = _extract_invoice_checking_id(payment)
logger.debug(
"Spark event %s payment_type=%s status=%s payment_id=%s raw_payment=%r extracted_id=%s",
event.__class__.__name__,
getattr(payment, "payment_type", None),
getattr(payment, "status", None),
getattr(payment, "id", None),
payment,
checking_id,
f"Spark event {event.__class__.__name__} payment_type={getattr(payment, 'payment_type', None)} "
f"status={getattr(payment, 'status', None)} payment_id={getattr(payment, 'id', None)} "
f"raw_payment={payment!r} extracted_id={checking_id}"
)
if not checking_id:
logger.debug("Spark event %s ignored (no checking id)", event.__class__.__name__)
logger.debug(f"Spark event {event.__class__.__name__} ignored (no checking id)")
return
try:
loop = asyncio.get_running_loop()
# Thread-safe queue put with payment hash (checking_id)
asyncio.run_coroutine_threadsafe(
self.queue.put(checking_id),
loop,
)
except RuntimeError:
logger.warning("No running event loop found for Spark event")
# More robust thread-safe event handling
self._safe_put_event(checking_id)
except Exception as e:
logger.error(f"Error handling Spark event: {e}")
import traceback
logger.debug(f"Event handler traceback: {traceback.format_exc()}")
def _safe_put_event(self, checking_id: str) -> None:
"""Safely put an event into the queue from any thread context"""
try:
target_loop = self.loop
if target_loop is None:
logger.warning("Spark event listener has no target loop; dropping event")
return
if target_loop.is_closed():
logger.warning("Spark event listener target loop is closed; dropping event")
return
# Use call_soon_threadsafe for more reliable thread-safe event handling
def queue_put():
try:
self.queue.put_nowait(checking_id)
logger.info(f"Spark event successfully queued: {checking_id}")
except asyncio.QueueFull:
logger.warning(f"Spark event queue full, dropping event: {checking_id}")
except Exception as e:
logger.error(f"Failed to put event in queue: {e}")
target_loop.call_soon_threadsafe(queue_put)
except Exception as exc:
logger.warning(f"Failed to queue Spark event (expected from callback thread): {exc}")
# Fallback: try the original approach
try:
if self.loop and not self.loop.is_closed():
future = asyncio.run_coroutine_threadsafe(
self.queue.put(checking_id),
self.loop,
)
logger.info(f"Spark event fallback queued: {checking_id}")
except Exception as fallback_exc:
logger.error(f"Both event queueing methods failed: {fallback_exc}")
else:
class SparkEventListener:
"""Dummy event listener when Spark SDK is not available"""
def __init__(self, queue: asyncio.Queue):
def __init__(self, queue: asyncio.Queue, loop: asyncio.AbstractEventLoop):
self.queue = queue
self.loop = loop
def on_event(self, event) -> None:
"""Dummy event handler"""
@@ -178,6 +287,7 @@ class SparkBackend(LightningBackend):
self.event_queue: Optional[asyncio.Queue] = None
self.listener: Optional[SparkEventListener] = None
self.listener_id: Optional[str] = None
self._event_loop: Optional[asyncio.AbstractEventLoop] = None
self._initialized = False
self._initialization_lock = asyncio.Lock()
self._connection_retry_count = 0
@@ -248,12 +358,24 @@ class SparkBackend(LightningBackend):
storage_dir = getattr(settings, 'mint_spark_storage_dir', 'data/spark')
connection_timeout = getattr(settings, 'mint_spark_connection_timeout', 30)
# ConnectRequest takes mnemonic directly, not a Seed object
event_loop = asyncio.get_running_loop()
# Store the event loop for SDK callbacks
if 'set_sdk_event_loop' in globals():
set_sdk_event_loop(event_loop)
for setter in (spark_uniffi_set_event_loop, common_uniffi_set_event_loop):
if setter:
try:
setter(event_loop)
except Exception as exc: # pragma: no cover - defensive log
logger.warning(f"Failed to register event loop with Spark SDK: {exc}")
# ConnectRequest requires a Seed object (mnemonic or entropy based)
seed = Seed.MNEMONIC(mnemonic=mnemonic, passphrase=None)
self.sdk = await asyncio.wait_for(
connect(
request=ConnectRequest(
config=config,
mnemonic=mnemonic,
seed=seed,
storage_dir=storage_dir
)
),
@@ -262,9 +384,11 @@ class SparkBackend(LightningBackend):
# Set up event listener for payment notifications
self.event_queue = asyncio.Queue()
self.listener = SparkEventListener(self.event_queue)
# add_event_listener is not async, it returns a string ID directly
self.listener_id = self.sdk.add_event_listener(listener=self.listener)
self._event_loop = event_loop
self.listener = SparkEventListener(self.event_queue, self._event_loop)
self.listener_id = await _await_if_needed(
self.sdk.add_event_listener(listener=self.listener)
)
logger.debug(f"Spark SDK initialized successfully on {network_str} network")
# Clear mnemonic from memory
@@ -275,12 +399,13 @@ class SparkBackend(LightningBackend):
"""Proper resource cleanup"""
try:
if hasattr(self, 'listener_id') and self.sdk:
# remove_event_listener is not async
self.sdk.remove_event_listener(id=self.listener_id)
if self.listener_id:
await _await_if_needed(
self.sdk.remove_event_listener(id=self.listener_id)
)
if self.sdk:
# disconnect is not async
self.sdk.disconnect()
await _await_if_needed(self.sdk.disconnect())
except Exception as e:
logger.error(f"Cleanup error: {e}")
@@ -288,6 +413,8 @@ class SparkBackend(LightningBackend):
self.sdk = None
self.listener = None
self.event_queue = None
self.listener_id = None
self._event_loop = None
self._initialized = False
async def _check_connectivity(self) -> bool:
@@ -296,7 +423,7 @@ class SparkBackend(LightningBackend):
if not self.sdk:
return False
await asyncio.wait_for(
self.sdk.get_info(request=GetInfoRequest()),
self.sdk.get_info(request=GetInfoRequest(ensure_synced=None)),
timeout=5.0
)
return True
@@ -306,7 +433,7 @@ class SparkBackend(LightningBackend):
async def status(self) -> StatusResponse:
try:
await self._ensure_initialized()
info = await self.sdk.get_info(request=GetInfoRequest())
info = await self.sdk.get_info(request=GetInfoRequest(ensure_synced=None))
return StatusResponse(
balance=Amount(Unit.sat, info.balance_sats),
error_message=None
@@ -336,15 +463,26 @@ class SparkBackend(LightningBackend):
)
request = ReceivePaymentRequest(payment_method=payment_method)
response = await self.sdk.receive_payment(request=request)
logger.debug(
"Spark create_invoice amount=%s response.payment_request=%s",
amount,
response.payment_request,
)
# Extract payment_hash from the invoice for consistent matching
from bolt11 import decode as bolt11_decode
try:
invoice_obj = bolt11_decode(response.payment_request)
payment_hash = invoice_obj.payment_hash
logger.debug(
f"Spark create_invoice amount={amount} payment_hash={payment_hash} invoice={response.payment_request[:50]}..."
)
except Exception as e:
logger.error(f"Failed to extract payment_hash from invoice: {e}")
# Fallback to using full invoice as checking_id
payment_hash = response.payment_request.lower()
checking_id_to_store = payment_hash.lower() if payment_hash else response.payment_request.lower()
logger.info(f"Spark storing checking_id: {checking_id_to_store[:20]}... (hash: {bool(payment_hash)})")
return InvoiceResponse(
ok=True,
checking_id=response.payment_request.lower(),
checking_id=checking_id_to_store,
payment_request=response.payment_request
)
except Exception as e:
@@ -388,11 +526,14 @@ class SparkBackend(LightningBackend):
# Map Spark payment status to PaymentResult
result = self._map_payment_status(payment)
fee_sats = _get_payment_fee_sats(payment)
preimage = _get_payment_preimage(payment)
return PaymentResponse(
result=result,
checking_id=payment.id,
fee=Amount(Unit.sat, payment.fee_sats) if hasattr(payment, 'fee_sats') and payment.fee_sats else None,
preimage=payment.preimage if hasattr(payment, 'preimage') else None
fee=Amount(Unit.sat, fee_sats) if fee_sats is not None else None,
preimage=preimage
)
except Exception as e:
logger.error(f"Spark pay_invoice error for quote {quote.quote}: {e}")
@@ -416,24 +557,22 @@ class SparkBackend(LightningBackend):
for payment in list_response.payments:
payment_checking_id = _extract_invoice_checking_id(payment)
logger.debug(
"Spark get_invoice_status candidate id=%s target=%s status=%s payment_id=%s raw_payment=%r",
payment_checking_id,
normalized_checking_id,
getattr(payment, "status", None),
getattr(payment, "id", None),
payment,
)
if payment_checking_id and payment_checking_id == normalized_checking_id:
# Found our payment - return its status
logger.debug(
f"Spark payment found: target={normalized_checking_id} status={getattr(payment, 'status', None)}"
)
result = self._map_payment_status(payment)
fee_sats = _get_payment_fee_sats(payment)
preimage = _get_payment_preimage(payment)
return PaymentStatus(
result=result,
fee=Amount(Unit.sat, payment.fee_sats) if hasattr(payment, 'fee_sats') and payment.fee_sats else None,
preimage=payment.preimage if hasattr(payment, 'preimage') else None
fee=Amount(Unit.sat, fee_sats) if fee_sats is not None else None,
preimage=preimage
)
# If not found in payments list, invoice is still pending
logger.debug(f"Spark payment not found for checking_id: {normalized_checking_id[:20]}...")
return PaymentStatus(
result=PaymentResult.PENDING,
error_message=None
@@ -450,18 +589,39 @@ class SparkBackend(LightningBackend):
try:
await self._ensure_initialized()
# Get payment by payment ID
get_request = GetPaymentRequest(payment_id=checking_id)
response = await self.sdk.get_payment(request=get_request)
payment = response.payment
# The checking_id is the invoice/bolt11 string for received payments
# We need to list payments and find the one with matching invoice
list_request = ListPaymentsRequest(payment_type=PaymentType.RECEIVE)
response = await self.sdk.list_payments(request=list_request)
# Find the payment with matching invoice
target_payment = None
checking_id_lower = checking_id.lower()
for payment in response.payments:
# Check if this payment's invoice matches our checking_id
invoice_id = _extract_invoice_checking_id(payment)
if invoice_id and invoice_id.lower() == checking_id_lower:
target_payment = payment
logger.debug(f"Found matching payment for invoice {checking_id[:20]}...")
break
if not target_payment:
logger.debug(f"No payment found for checking_id {checking_id[:20]}...")
return PaymentStatus(
result=PaymentResult.PENDING,
error_message="Payment not found yet"
)
# Map Spark payment status to PaymentResult
result = self._map_payment_status(payment)
result = self._map_payment_status(target_payment)
fee_sats = _get_payment_fee_sats(target_payment)
preimage = _get_payment_preimage(target_payment)
return PaymentStatus(
result=result,
fee=Amount(Unit.sat, payment.fee_sats) if hasattr(payment, 'fee_sats') and payment.fee_sats else None,
preimage=payment.preimage if hasattr(payment, 'preimage') else None
fee=Amount(Unit.sat, fee_sats) if fee_sats is not None else None,
preimage=preimage
)
except Exception as e:
logger.error(f"Get payment status error: {e}")
@@ -539,7 +699,7 @@ class SparkBackend(LightningBackend):
self.event_queue.get(),
timeout=30.0
)
logger.debug("Spark paid_invoices_stream emitting checking_id=%s", payment_id)
logger.debug(f"Spark paid_invoices_stream emitting checking_id={payment_id}")
yield payment_id
# Reset retry delay on success
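
paid_invoices_stream is a queue-backed async generator: it blocks on the event queue with a timeout so the surrounding loop can periodically recover from stalls, then yields the checking_id. A minimal sketch of that wait/yield loop (timeout value and names are illustrative):

import asyncio
from typing import AsyncGenerator

async def paid_ids_stream(queue: asyncio.Queue) -> AsyncGenerator[str, None]:
    while True:
        try:
            # Wake up every 30s even if nothing arrives, so health checks / retries can run
            payment_id = await asyncio.wait_for(queue.get(), timeout=30.0)
        except asyncio.TimeoutError:
            continue
        yield payment_id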
@@ -577,3 +737,8 @@ class SparkBackend(LightningBackend):
except Exception as e:
logger.warning(f"Spark health check failed: {e}")
return False
async def _await_if_needed(value):
"""Await value if it is awaitable; otherwise return it directly."""
if inspect.isawaitable(value):
return await value
return value
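
A quick usage sketch for the helper above, showing why it is handy when an SDK method is synchronous in one release and a coroutine in another; the two stand-in callables are illustrative, and the sketch assumes `_await_if_needed` (and its `inspect` import) is in scope:

import asyncio

async def _demo_await_if_needed() -> None:
    def old_api() -> str:          # e.g. returns a plain value
        return "listener-1"

    async def new_api() -> str:    # e.g. returns a coroutine
        return "listener-2"

    print(await _await_if_needed(old_api()))   # -> listener-1
    print(await _await_if_needed(new_api()))   # -> listener-2

asyncio.run(_demo_await_if_needed())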

poetry.lock (generated)

@@ -199,41 +199,41 @@ coincurve = "*"
[[package]]
name = "breez-sdk-spark"
version = "0.1.9"
version = "0.3.4"
description = "Python language bindings for the Breez Spark SDK"
optional = false
python-versions = "*"
files = [
{file = "breez_sdk_spark-0.1.9-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7fb6c702f77f54306ebeafc5974461290e96c143df6f1a5afb974b62576b7972"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-manylinux_2_31_aarch64.whl", hash = "sha256:041b5ac3d6b2ec9c5ffec1a46ed9874376ad16bfd50bc28c2fc2a9a51a473bab"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:86a78f09cdc4dc8fbcc995073e11bdc0c06982e00b214e0f0210b15fb663e1a3"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-win32.whl", hash = "sha256:01c64566d2146b5deabb9e7291c3ebcc400bb054ef8391cc2ca50480c03015c2"},
{file = "breez_sdk_spark-0.1.9-cp310-cp310-win_amd64.whl", hash = "sha256:4c6906390454f6952bf2b923949d13eefcdccdf8da073496f0023bba41431933"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a5467d289d0245d31343f6d10744071cd092cd2b108fdf5e76e1b2f9e63c4a28"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:afadd71fb816c24ebebbc709a13c1c06091941de31c9add321316c982f06652a"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:5fd1e2e0c381bc4c142c748f4f29b43fb089b8bf909dd55107f63ff8b5fd733d"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-win32.whl", hash = "sha256:e53cb1890643f697316f4aad7c831794a798f3d6e8f288aa8906b841cc582678"},
{file = "breez_sdk_spark-0.1.9-cp311-cp311-win_amd64.whl", hash = "sha256:4d7e73759ccb1da1fca587ffe6f72d166459b34d7f6614de23352860306538ec"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5e944b035fecd6de90b9d18eb8f32b990f662880bd91fb847011f10d12e88367"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:6991e3cd3025ee153eb2a844cf06b1892c196a6079ee3db24e449e55aee06544"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:2ceb597a3bff6755525234ed07892dc82034c745fd9578629b3db85433907165"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-win32.whl", hash = "sha256:67d4e9dbc5779d8cd67e879be06006f714ec46b3d8c55ca9f08efae1e21e8889"},
{file = "breez_sdk_spark-0.1.9-cp312-cp312-win_amd64.whl", hash = "sha256:e418d47aad183cd80a5bd4629b90511e7624676db8996c0a83be42f2d528f650"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d966f858bd889b3a509d8f61d5f1e3f72fd1fab7e1b9830b523363fe357d60fa"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-manylinux_2_31_aarch64.whl", hash = "sha256:66763c74afdf2a5c4c4f5248666f87725c76c5853685a377467c3935516576b7"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:ca125db55d0a04b08956c2340b2f8091b55848e5e77c4569128badbf1eca6991"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-win32.whl", hash = "sha256:3ea9576e9fcde542ff0dda8df4e3dc20147bb7a93eecf8312b393d825ea170c9"},
{file = "breez_sdk_spark-0.1.9-cp313-cp313-win_amd64.whl", hash = "sha256:540baacc12287f7e8828e6bbbdf3e6aa9b1aaaf1cb85a0980c0a6e70933bfb3d"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1159b0e92581a30e05936e7997e19ef682b1d93057d5136847185a01145eb25c"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-manylinux_2_31_aarch64.whl", hash = "sha256:84f91b21a7698938dc7707fd31be10d7449347870ba3d07e7f3ff6260c8de754"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-manylinux_2_31_x86_64.whl", hash = "sha256:374e9f486d4e576aa3e55e1aab912c474e4d96115a5b678c1f95a08610e79968"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-win32.whl", hash = "sha256:326251dff34f0c19614746eb345bc9906785a071a3eaa7da7c54ea4076557b4c"},
{file = "breez_sdk_spark-0.1.9-cp38-cp38-win_amd64.whl", hash = "sha256:a205f11710fd1720634632a20e502d27fbdd1d400d94b5545ca74a672f509b1b"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:aa3ffaf682818608a38582b76db92f4fa2ffdf44634e2163014e594722513106"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-manylinux_2_31_aarch64.whl", hash = "sha256:7216030f2cea3d3e5fa3a4e98fda7245334de9e96ce2186bdfd11f49bb1872f7"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-manylinux_2_31_x86_64.whl", hash = "sha256:2f4512dd96bf2c7eda940db717fc961e20716dda6a38095e63322c38437a43c2"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-win32.whl", hash = "sha256:582855db91d81fe9f0f9b340e54e54d55b16d76b890467f77178b777561da9d6"},
{file = "breez_sdk_spark-0.1.9-cp39-cp39-win_amd64.whl", hash = "sha256:9f5c6110e9f378bdf446303b0f486dc221c96691710cc453a06ea5f5c12ee226"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9cbaa5324af163b763e3dfccc1f6a449ffbd74666f56a5d8a6d648ead3d997d5"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-manylinux_2_31_aarch64.whl", hash = "sha256:6b2d7cf0b721bf2ba24e2b1e665cdffc8714ab85815f1d2f75cb1a839c23a03d"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:3c302a7fcd6db5c01de52c1e26600e86b5ddda59a233f64d2fe31f0033b7155e"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-win32.whl", hash = "sha256:4ea8fbe1b1f16c5e4d1ece199efc6bdf8c10827782994b8a1e51684023aea128"},
{file = "breez_sdk_spark-0.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:f641c0f033fd6c5b61c2af7440812404b0f4a7734aa831a170a4cbacd678e18a"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ee7b10b76a83bcc387e79470962e0f3a7ff81ab5db9f21953db056657fec90f7"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:c9f2d77dde456373af739ac0aadae1c38fa506cc53ede857fff33eae56d709e1"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:4d3e941e5996112f8116b2c1f3727f009b56a9d582472141cd97dee876a79a41"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-win32.whl", hash = "sha256:de2d2bfc6c7fee6086d75b1e37744da2467c0e62bc8e119b9c76664728cd4ead"},
{file = "breez_sdk_spark-0.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:0789d0c7d0852afa5ae1b08bbfc44ad841c92faf149356dda343577b4ca0c949"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:cb503060516c4ad96e3fbda5bf693dfede04eb0d1a8ab685453c8161fd808103"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:ae57372c1f559ffb5f12eaa9627538d408b25c695473900855e7290ef391ada4"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:5a4d13a9ebb402f1cb2ec10556e4150db6cf2a73c3c91d036d6f38860cca6895"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-win32.whl", hash = "sha256:4a3e4eb1404fc915d5bd6d515bd863001e791aed9415710778148fed31a6b43d"},
{file = "breez_sdk_spark-0.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:58cc6cd7551c70ac062c29e02a0f3b557b6b82cef4abe71f87094b1dafb72495"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d1f8f8a8ffe4392d0833487b089280f846b47c7320a8497a0fbd589b1af0dae9"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-manylinux_2_31_aarch64.whl", hash = "sha256:c35d365a90da2f70aac7831987431e49a1f8f4ac0a5f92ab16bf868ec45c7792"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:2fe652e45ed761a4faee9e6a789ceb6fde09dc62a8f2662dfb72b387b936260c"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-win32.whl", hash = "sha256:0fe6fa8f156bf8f052e7c40f7cc6a063244793a6bbe24b560f84e0191e2b5914"},
{file = "breez_sdk_spark-0.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:cec60322d21d4e4300871020d0b4e8d3ba69a98846c588422696310ab48e5727"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:c93add503f09c0ca99c1b17598e90bb6b9181e98e77485b926851b75dc16423a"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-manylinux_2_31_aarch64.whl", hash = "sha256:9de93152ca2be527f73df215ab278884b0957578cb3670f15111a358cd55d0be"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-manylinux_2_31_x86_64.whl", hash = "sha256:064cdaee5f92ee8a12c3b7d90633666e34b5eee129e722147354b45409f18506"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-win32.whl", hash = "sha256:0283815011edddfd18c82ff85e6b1e76092f9fd3c346cacbc700a67709d35a6f"},
{file = "breez_sdk_spark-0.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:e85fccbd6844ed3efa585d137a5b8166d12271f2e6e95530622a12a526776a4f"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:97fcf7372dc10051abd0be40548472fb7332c19263d215c17c95fb7ec6c715bb"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-manylinux_2_31_aarch64.whl", hash = "sha256:08a133b43ff123b7e231ff849930f884254ff608dc5407c12276d09ebe645e8b"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-manylinux_2_31_x86_64.whl", hash = "sha256:69753fe329cc57e84883d33e2bddcb0bf816f849b724c0ddb86904f4c8d49b55"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-win32.whl", hash = "sha256:d68c8c22a837a4ca81375ee4f1d7f72e0808a120ca933c8818308fd185f21665"},
{file = "breez_sdk_spark-0.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:82fb1c8c147649775259e12e03a63e3ce89bbd000dd7690a0957977807a739e3"},
]
[[package]]
@@ -2696,4 +2696,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "af82f494ec3ac5f839968129579cd5986e3ff6994f0d93fcb0ddcec4374fc01f"
content-hash = "69b57bd10ce6b62ec91378f866d77f641a4e99aa5cfc79ac2a4910e2bc1880c8"


@@ -44,7 +44,7 @@ redis = "^5.1.1"
brotli = "^1.1.0"
zstandard = "^0.23.0"
jinja2 = "^3.1.5"
breez-sdk-spark = "^0.1.0"
breez-sdk-spark = "^0.3.0"
[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.24.0"