Fix TokenV4 handling of base64 keysets (#575)

* wip: handle tokenv4 if the keyset is base64

* wip

* Tokens now dataclasses

* Deserialize DLEQ from string

* add tokenv3/v4 base64 keyset ID tests

* fix cli
Author: callebtc
Date: 2024-07-10 14:12:03 +02:00
Committed by: GitHub
Parent: 8eea541b74
Commit: 26b94951fc
12 changed files with 326 additions and 185 deletions


@@ -170,7 +170,7 @@ This command runs the mint on your local computer. Skip this step if you want to
 ## Docker
 ```
-docker run -d -p 3338:3338 --name nutshell -e MINT_BACKEND_BOLT11_SAT=FakeWallet -e MINT_LISTEN_HOST=0.0.0.0 -e MINT_LISTEN_PORT=3338 -e MINT_PRIVATE_KEY=TEST_PRIVATE_KEY cashubtc/nutshell:0.15.3 poetry run mint
+docker run -d -p 3338:3338 --name nutshell -e MINT_BACKEND_BOLT11_SAT=FakeWallet -e MINT_LISTEN_HOST=0.0.0.0 -e MINT_LISTEN_PORT=3338 -e MINT_PRIVATE_KEY=TEST_PRIVATE_KEY cashubtc/nutshell:0.16.0 poetry run mint
 ```
 ## From this repository


@@ -1,7 +1,8 @@
 import base64
 import json
 import math
-from dataclasses import dataclass
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
 from enum import Enum
 from sqlite3 import Row
 from typing import Any, Dict, List, Optional, Union
@@ -148,7 +149,10 @@ class Proof(BaseModel):
     @classmethod
     def from_dict(cls, proof_dict: dict):
-        if proof_dict.get("dleq") and isinstance(proof_dict["dleq"], str):
+        if proof_dict.get("dleq") and isinstance(proof_dict["dleq"], dict):
+            proof_dict["dleq"] = DLEQWallet(**proof_dict["dleq"])
+        elif proof_dict.get("dleq") and isinstance(proof_dict["dleq"], str):
+            # Proofs read from the database have the DLEQ proof as a string
             proof_dict["dleq"] = DLEQWallet(**json.loads(proof_dict["dleq"]))
         else:
             # overwrite the empty string with None
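To illustrate the two input shapes this branch now accepts, here is a minimal sketch (the DLEQ values are made-up placeholders; it assumes `Proof` and `DLEQWallet` from `cashu.core.base` as shown in this hunk, and that the pydantic models compare by field values):

```python
import json

from cashu.core.base import Proof

# Hypothetical proof; the DLEQ hex values are placeholders for illustration only.
dleq = {"e": "aa" * 32, "s": "bb" * 32, "r": "cc" * 32}
proof = {
    "id": "009a1f293253e41e",
    "amount": 2,
    "secret": "407915bc212be61a77e3e6d2aeb4c727980bda51cd06a6afc29e2861768a7837",
    "C": "02bc9097997d81afb2cc7346b5e4345a9346bd2a506eb7958598a72f0cf85163ea",
}

# Token JSON carries the DLEQ as a dict; the wallet database stores it as a JSON string.
from_token = Proof.from_dict({**proof, "dleq": dleq})
from_db = Proof.from_dict({**proof, "dleq": json.dumps(dleq)})
assert from_token.dleq == from_db.dleq
```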
@@ -752,6 +756,48 @@ class MintKeyset:
 # ------- TOKEN -------
 
+
+class Token(ABC):
+    @property
+    @abstractmethod
+    def proofs(self) -> List[Proof]:
+        ...
+
+    @property
+    @abstractmethod
+    def amount(self) -> int:
+        ...
+
+    @property
+    @abstractmethod
+    def mint(self) -> str:
+        ...
+
+    @property
+    @abstractmethod
+    def keysets(self) -> List[str]:
+        ...
+
+    @property
+    @abstractmethod
+    def memo(self) -> Optional[str]:
+        ...
+
+    @memo.setter
+    @abstractmethod
+    def memo(self, memo: Optional[str]):
+        ...
+
+    @property
+    @abstractmethod
+    def unit(self) -> str:
+        ...
+
+    @unit.setter
+    @abstractmethod
+    def unit(self, unit: str):
+        ...
+
+
 class TokenV3Token(BaseModel):
     mint: Optional[str] = None
     proofs: List[Proof]
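The new abstract `Token` base lets wallet code handle V3 and V4 tokens uniformly through `proofs`, `amount`, `mint`, `keysets`, `memo`, and `unit`. A small sketch of a caller that relies only on this shared interface (the function name is illustrative, not part of the codebase):

```python
from cashu.core.base import Token


def summarize(token: Token) -> str:
    # Same code path for TokenV3 and TokenV4, since both implement the Token ABC.
    return (
        f"{token.amount} {token.unit} from {token.mint}, "
        f"{len(token.proofs)} proofs across {len(token.keysets)} keyset(s)"
    )
```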
@@ -763,32 +809,59 @@ class TokenV3Token(BaseModel):
         return return_dict
 
-class TokenV3(BaseModel):
+@dataclass
+class TokenV3(Token):
     """
     A Cashu token that includes proofs and their respective mints. Can include proofs from multiple different mints and keysets.
     """
 
-    token: List[TokenV3Token] = []
-    memo: Optional[str] = None
-    unit: Optional[str] = None
+    token: List[TokenV3Token] = field(default_factory=list)
+    _memo: Optional[str] = None
+    _unit: str = "sat"
 
-    def get_proofs(self):
+    class Config:
+        allow_population_by_field_name = True
+
+    @property
+    def proofs(self) -> List[Proof]:
         return [proof for token in self.token for proof in token.proofs]
 
-    def get_amount(self):
-        return sum([p.amount for p in self.get_proofs()])
+    @property
+    def amount(self) -> int:
+        return sum([p.amount for p in self.proofs])
 
-    def get_keysets(self):
-        return list(set([p.id for p in self.get_proofs()]))
+    @property
+    def keysets(self) -> List[str]:
+        return list(set([p.id for p in self.proofs]))
 
-    def get_mints(self):
+    @property
+    def mint(self) -> str:
+        return self.mints[0]
+
+    @property
+    def mints(self) -> List[str]:
         return list(set([t.mint for t in self.token if t.mint]))
 
+    @property
+    def memo(self) -> Optional[str]:
+        return str(self._memo) if self._memo else None
+
+    @memo.setter
+    def memo(self, memo: Optional[str]):
+        self._memo = memo
+
+    @property
+    def unit(self) -> str:
+        return self._unit
+
+    @unit.setter
+    def unit(self, unit: str):
+        self._unit = unit
+
     def serialize_to_dict(self, include_dleq=False):
         return_dict = dict(token=[t.to_dict(include_dleq) for t in self.token])
         if self.memo:
             return_dict.update(dict(memo=self.memo))  # type: ignore
+        if self.unit:
             return_dict.update(dict(unit=self.unit))  # type: ignore
         return return_dict
@@ -816,10 +889,30 @@ class TokenV3(BaseModel):
         tokenv3_serialized = prefix
         # encode the token as a base64 string
         tokenv3_serialized += base64.urlsafe_b64encode(
-            json.dumps(self.serialize_to_dict(include_dleq)).encode()
+            json.dumps(
+                self.serialize_to_dict(include_dleq), separators=(",", ":")
+            ).encode()
         ).decode()
         return tokenv3_serialized
 
+    @classmethod
+    def parse_obj(cls, token_dict: Dict[str, Any]):
+        if not token_dict.get("token"):
+            raise Exception("Token must contain proofs.")
+        token: List[Dict[str, Any]] = token_dict.get("token") or []
+        assert token, "Token must contain proofs."
+        return cls(
+            token=[
+                TokenV3Token(
+                    mint=t.get("mint"),
+                    proofs=[Proof.from_dict(p) for p in t.get("proofs") or []],
+                )
+                for t in token
+            ],
+            _memo=token_dict.get("memo"),
+            _unit=token_dict.get("unit") or "sat",
+        )
+
 
 class TokenV4DLEQ(BaseModel):
     """
@@ -868,7 +961,8 @@ class TokenV4Token(BaseModel):
     p: List[TokenV4Proof]
 
-class TokenV4(BaseModel):
+@dataclass
+class TokenV4(Token):
     # mint URL
     m: str
     # unit
@@ -882,14 +976,25 @@ class TokenV4(BaseModel):
     def mint(self) -> str:
         return self.m
 
+    def set_mint(self, mint: str):
+        self.m = mint
+
     @property
     def memo(self) -> Optional[str]:
         return self.d
 
+    @memo.setter
+    def memo(self, memo: Optional[str]):
+        self.d = memo
+
     @property
     def unit(self) -> str:
         return self.u
 
+    @unit.setter
+    def unit(self, unit: str):
+        self.u = unit
+
     @property
     def amounts(self) -> List[int]:
         return [p.a for token in self.t for p in token.p]
@@ -921,12 +1026,16 @@ class TokenV4(BaseModel):
                 for p in token.p
             ]
 
+    @property
+    def keysets(self) -> List[str]:
+        return list(set([p.i.hex() for p in self.t]))
+
     @classmethod
     def from_tokenv3(cls, tokenv3: TokenV3):
-        if not len(tokenv3.get_mints()) == 1:
+        if not len(tokenv3.mints) == 1:
             raise Exception("TokenV3 must contain proofs from only one mint.")
 
-        proofs = tokenv3.get_proofs()
+        proofs = tokenv3.proofs
         proofs_by_id: Dict[str, List[Proof]] = {}
         for proof in proofs:
             proofs_by_id.setdefault(proof.id, []).append(proof)
@@ -960,7 +1069,7 @@ class TokenV4(BaseModel):
         # set memo
         cls.d = tokenv3.memo
         # set mint
-        cls.m = tokenv3.get_mints()[0]
+        cls.m = tokenv3.mint
         # set unit
         cls.u = tokenv3.unit or "sat"
         return cls(t=cls.t, d=cls.d, m=cls.m, u=cls.u)
@@ -1016,7 +1125,7 @@ class TokenV4(BaseModel):
         return cls.parse_obj(token)
 
     def to_tokenv3(self) -> TokenV3:
-        tokenv3 = TokenV3()
+        tokenv3 = TokenV3(_memo=self.d, _unit=self.u)
        for token in self.t:
             tokenv3.token.append(
                 TokenV3Token(
@@ -1043,3 +1152,12 @@ class TokenV4(BaseModel):
                 )
             )
         return tokenv3
+
+    @classmethod
+    def parse_obj(cls, token_dict: dict):
+        return cls(
+            m=token_dict["m"],
+            u=token_dict["u"],
+            t=[TokenV4Token(**t) for t in token_dict["t"]],
+            d=token_dict.get("d", None),
+        )
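Why base64 keysets need special handling is visible here: `TokenV4` stores keyset IDs as raw bytes (`p.i`, exposed as hex via the new `keysets` property), so a legacy base64 keyset ID cannot be hex-decoded and the V3-to-V4 conversion fails; `deserialize_token_from_string` further down catches the resulting `ValueError`. A quick sketch of the distinction, using keyset IDs from the tests at the end of this commit:

```python
# Modern hex keyset IDs round-trip through bytes and fit TokenV4's byte field.
assert bytes.fromhex("009a1f293253e41e").hex() == "009a1f293253e41e"

# Legacy base64 keyset IDs do not, so the token has to stay a TokenV3.
try:
    bytes.fromhex("yjzQhxghPdrr")
except ValueError:
    print("base64 keyset ID -> keep TokenV3")
```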


@@ -100,10 +100,8 @@ class PostMintQuoteRequest(BaseModel):
 class PostMintQuoteResponse(BaseModel):
     quote: str  # quote id
     request: str  # input payment request
-    paid: Optional[
-        bool
-    ]  # whether the request has been paid # DEPRECATED as per NUT PR #141
-    state: str  # state of the quote
+    paid: Optional[bool]  # DEPRECATED as per NUT-04 PR #141
+    state: Optional[str]  # state of the quote
     expiry: Optional[int]  # expiry of the quote
 
     @classmethod
@@ -180,8 +178,10 @@ class PostMeltQuoteResponse(BaseModel):
     quote: str  # quote id
     amount: int  # input amount
     fee_reserve: int  # input fee reserve
-    paid: bool  # whether the request has been paid # DEPRECATED as per NUT PR #136
-    state: str  # state of the quote
+    paid: Optional[
+        bool
+    ]  # whether the request has been paid # DEPRECATED as per NUT PR #136
+    state: Optional[str]  # state of the quote
     expiry: Optional[int]  # expiry of the quote
     payment_preimage: Optional[str] = None  # payment preimage
     change: Union[List[BlindedSignature], None] = None


@@ -1,8 +1,8 @@
-from ...core.base import TokenV4
+from ...core.base import Token
 from ...wallet.crud import get_keysets
 
 
-async def verify_mints(wallet, tokenObj: TokenV4):
+async def verify_mints(wallet, tokenObj: Token):
     # verify mints
     mint = tokenObj.mint
     mint_keysets = await get_keysets(mint_url=mint, db=wallet.db)


@@ -8,7 +8,7 @@ from typing import Optional
 from fastapi import APIRouter, Query
 
-from ...core.base import TokenV3, TokenV4
+from ...core.base import Token, TokenV3
 from ...core.helpers import sum_proofs
 from ...core.settings import settings
 from ...lightning.base import (
@@ -261,7 +261,7 @@ async def receive_command(
         wallet = await mint_wallet()
     initial_balance = wallet.available_balance
     if token:
-        tokenObj: TokenV4 = deserialize_token_from_string(token)
+        tokenObj: Token = deserialize_token_from_string(token)
         await verify_mints(wallet, tokenObj)
         await receive(wallet, tokenObj)
     elif nostr:
@@ -317,7 +317,7 @@ async def burn(
     else:
         # check only the specified ones
         tokenObj = TokenV3.deserialize(token)
-        proofs = tokenObj.get_proofs()
+        proofs = tokenObj.proofs
 
     if delete:
         await wallet.invalidate(proofs)


@@ -15,7 +15,7 @@ import click
 from click import Context
 from loguru import logger
 
-from ...core.base import Invoice, Method, MintQuoteState, TokenV3, TokenV4, Unit
+from ...core.base import Invoice, Method, MintQuoteState, TokenV4, Unit
 from ...core.helpers import sum_proofs
 from ...core.json_rpc.base import JSONRPCNotficationParams
 from ...core.logging import configure_logger
@@ -441,6 +441,16 @@ async def swap(ctx: Context):
 @coro
 async def balance(ctx: Context, verbose):
     wallet: Wallet = ctx.obj["WALLET"]
+    if verbose:
+        wallet = await wallet.with_db(
+            url=wallet.url,
+            db=wallet.db.db_location,
+            name=wallet.name,
+            skip_db_read=False,
+            unit=wallet.unit.name,
+            load_all_keysets=True,
+        )
     unit_balances = wallet.balance_per_unit()
     await wallet.load_proofs(reload=True)
@@ -597,13 +607,13 @@ async def receive_cli(
         # verify that we trust the mint in this tokens
         # ask the user if they want to trust the new mint
         mint_url = token_obj.mint
-        mint_wallet = Wallet(
+        mint_wallet = await Wallet.with_db(
             mint_url,
             os.path.join(settings.cashu_dir, wallet.name),
             unit=token_obj.unit,
         )
         await verify_mint(mint_wallet, mint_url)
-        receive_wallet = await receive(wallet, token_obj)
+        receive_wallet = await receive(mint_wallet, token_obj)
         ctx.obj["WALLET"] = receive_wallet
     elif nostr:
         await receive_nostr(wallet)
@@ -672,8 +682,8 @@ async def burn(ctx: Context, token: str, all: bool, force: bool, delete: str):
         proofs = [proof for proof in reserved_proofs if proof["send_id"] == delete]
     else:
         # check only the specified ones
-        token_obj = TokenV3.deserialize(token)
-        proofs = token_obj.get_proofs()
+        tokenObj = deserialize_token_from_string(token)
+        proofs = tokenObj.proofs
 
     if delete:
         await wallet.invalidate(proofs)
@@ -709,10 +719,18 @@ async def burn(ctx: Context, token: str, all: bool, force: bool, delete: str):
 @coro
 async def pending(ctx: Context, legacy, number: int, offset: int):
     wallet: Wallet = ctx.obj["WALLET"]
+    wallet = await Wallet.with_db(
+        url=wallet.url,
+        db=wallet.db.db_location,
+        name=wallet.name,
+        skip_db_read=False,
+        unit=wallet.unit.name,
+        load_all_keysets=True,
+    )
     reserved_proofs = await get_reserved_proofs(wallet.db)
     if len(reserved_proofs):
         print("--------------------------\n")
-        sorted_proofs = sorted(reserved_proofs, key=itemgetter("send_id"))  # type: ignore
+        sorted_proofs = sorted(reserved_proofs, key=itemgetter("send_id"), reverse=True)  # type: ignore
         if number:
             number += offset
         for i, (key, value) in islice(
@@ -737,7 +755,7 @@ async def pending(ctx: Context, legacy, number: int, offset: int):
             ).strftime("%Y-%m-%d %H:%M:%S")
             print(
                 f"#{i} Amount:"
-                f" {wallet.unit.str(sum_proofs(grouped_proofs))} Time:"
+                f" {Unit[token_obj.unit].str(sum_proofs(grouped_proofs))} Time:"
                 f" {reserved_date} ID: {key} Mint: {mint}\n"
             )
             print(f"{token}\n")


@@ -27,9 +27,13 @@ async def get_unit_wallet(ctx: Context, force_select: bool = False):
     await wallet.load_proofs(reload=False)
     # show balances per unit
     unit_balances = wallet.balance_per_unit()
-    if wallet.unit in [unit_balances.keys()] and not force_select:
-        return wallet
-    elif len(unit_balances) > 1 and not ctx.obj["UNIT"]:
+
+    logger.debug(f"Wallet URL: {wallet.url}")
+    logger.debug(f"Wallet unit: {wallet.unit}")
+    logger.debug(f"mint_balances: {unit_balances}")
+    logger.debug(f"ctx.obj['UNIT']: {ctx.obj['UNIT']}")
+
+    if len(unit_balances) > 1 and not ctx.obj["UNIT"]:
         print(f"You have balances in {len(unit_balances)} units:")
         print("")
         for i, (k, v) in enumerate(unit_balances.items()):
@@ -68,14 +72,15 @@ async def get_mint_wallet(ctx: Context, force_select: bool = False):
     """
     # we load a dummy wallet so we can check the balance per mint
     wallet: Wallet = ctx.obj["WALLET"]
-    await wallet.load_proofs(reload=False)
-    mint_balances = await wallet.balance_per_minturl()
-
-    if ctx.obj["HOST"] not in mint_balances and not force_select:
-        mint_url = wallet.url
-    elif len(mint_balances) > 1:
+    await wallet.load_proofs(reload=True, all_keysets=True)
+    mint_balances = await wallet.balance_per_minturl(unit=wallet.unit)
+    logger.debug(f"Wallet URL: {wallet.url}")
+    logger.debug(f"Wallet unit: {wallet.unit}")
+    logger.debug(f"mint_balances: {mint_balances}")
+    logger.debug(f"ctx.obj['HOST']: {ctx.obj['HOST']}")
+    if len(mint_balances) > 1:
         # if we have balances on more than one mint, we ask the user to select one
-        await print_mint_balances(wallet, show_mints=True)
+        await print_mint_balances(wallet, show_mints=True, mint_balances=mint_balances)
 
         url_max = max(mint_balances, key=lambda v: mint_balances[v]["available"])
         nr_max = list(mint_balances).index(url_max) + 1
@@ -92,10 +97,10 @@ async def get_mint_wallet(ctx: Context, force_select: bool = False):
             mint_url = list(mint_balances.keys())[int(mint_nr_str) - 1]
         else:
             raise Exception("invalid input.")
+    elif ctx.obj["HOST"] and ctx.obj["HOST"] not in mint_balances.keys():
+        mint_url = ctx.obj["HOST"]
     elif len(mint_balances) == 1:
         mint_url = list(mint_balances.keys())[0]
+    else:
+        mint_url = wallet.url
 
     # load this mint_url into a wallet
     mint_wallet = await Wallet.with_db(
@@ -109,12 +114,15 @@ async def get_mint_wallet(ctx: Context, force_select: bool = False):
     return mint_wallet
 
 
-async def print_mint_balances(wallet: Wallet, show_mints: bool = False):
+async def print_mint_balances(
+    wallet: Wallet, show_mints: bool = False, mint_balances=None
+):
     """
     Helper function that prints the balances for each mint URL that we have tokens from.
     """
     # get balances per mint
-    mint_balances = await wallet.balance_per_minturl(unit=wallet.unit)
+    mint_balances = mint_balances or await wallet.balance_per_minturl(unit=wallet.unit)
+    logger.trace(mint_balances)
 
     # if we have a balance on a non-default mint, we show its URL
     keysets = [k for k, v in wallet.balance_per_keyset().items()]
     for k in keysets:


@@ -3,7 +3,7 @@ from typing import Optional
 from loguru import logger
 
-from ..core.base import TokenV3, TokenV4
+from ..core.base import Token, TokenV3, TokenV4
 from ..core.db import Database
 from ..core.helpers import sum_proofs
 from ..core.migrations import migrate_databases
@@ -34,7 +34,7 @@ async def list_mints(wallet: Wallet):
     return mints
 
 
-async def redeem_TokenV3_multimint(wallet: Wallet, token: TokenV3) -> Wallet:
+async def redeem_TokenV3(wallet: Wallet, token: TokenV3) -> Wallet:
     """
     Helper function to iterate thruogh a token with multiple mints and redeem them from
     these mints one keyset at a time.
@@ -46,9 +46,7 @@ async def redeem_TokenV3_multimint(wallet: Wallet, token: TokenV3) -> Wallet:
         token.unit = keysets[0].unit.name
 
     for t in token.token:
-        assert t.mint, Exception(
-            "redeem_TokenV3_multimint: multimint redeem without URL"
-        )
+        assert t.mint, Exception("redeem_TokenV3: multimint redeem without URL")
         mint_wallet = await Wallet.with_db(
             t.mint,
             os.path.join(settings.cashu_dir, wallet.name),
@@ -74,12 +72,23 @@ async def redeem_TokenV4(wallet: Wallet, token: TokenV4) -> Wallet:
     return wallet
 
 
-def deserialize_token_from_string(token: str) -> TokenV4:
-    # deserialize token
+async def redeem_universal(wallet: Wallet, token: Token) -> Wallet:
+    if isinstance(token, TokenV3):
+        return await redeem_TokenV3(wallet, token)
+    if isinstance(token, TokenV4):
+        return await redeem_TokenV4(wallet, token)
+    raise Exception("Invalid token type")
+
+
+def deserialize_token_from_string(token: str) -> Token:
+    # deserialize token
     if token.startswith("cashuA"):
         tokenV3Obj = TokenV3.deserialize(token)
-        return TokenV4.from_tokenv3(tokenV3Obj)
+        try:
+            return TokenV4.from_tokenv3(tokenV3Obj)
+        except ValueError as e:
+            logger.debug(f"Could not convert TokenV3 to TokenV4: {e}")
+            return tokenV3Obj
 
     if token.startswith("cashuB"):
         tokenObj = TokenV4.deserialize(token)
         return tokenObj
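This is the behavioral core of the fix: a `cashuA` token whose keysets cannot be converted now stays a `TokenV3` instead of raising, and `redeem_universal` dispatches on whichever type comes back. A usage sketch (the `describe` helper is illustrative, not part of the codebase):

```python
from cashu.core.base import Token, TokenV4
from cashu.wallet.helpers import deserialize_token_from_string


def describe(serialized: str) -> str:
    """`serialized` is any 'cashuA...' or 'cashuB...' string."""
    token: Token = deserialize_token_from_string(serialized)
    if isinstance(token, TokenV4):
        return f"TokenV4, keysets {token.keysets}"
    # Legacy tokens with base64 keyset IDs stay TokenV3 after this change.
    return f"TokenV3, keysets {token.keysets}"
```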
@@ -89,14 +98,9 @@ def deserialize_token_from_string(token: str) -> TokenV4:
 async def receive(
     wallet: Wallet,
-    tokenObj: TokenV4,
+    token: Token,
 ) -> Wallet:
-    # redeem tokens with new wallet instances
-    mint_wallet = await redeem_TokenV4(
-        wallet,
-        tokenObj,
-    )
+    mint_wallet = await redeem_universal(wallet, token)
 
     # reload main wallet so the balance updates
     await wallet.load_proofs(reload=True)
     return mint_wallet


@@ -6,7 +6,7 @@ import click
 from httpx import ConnectError
 from loguru import logger
 
-from ..core.base import TokenV4
+from ..core.base import Token
 from ..core.settings import settings
 from ..nostr.client.client import NostrClient
 from ..nostr.event import Event
@@ -127,18 +127,13 @@ async def receive_nostr(
             for w in words:
                 try:
                     # call the receive method
-                    tokenObj: TokenV4 = deserialize_token_from_string(w)
+                    tokenObj: Token = deserialize_token_from_string(w)
                     print(
                         f"Receiving {tokenObj.amount} sat on mint"
                         f" {tokenObj.mint} from nostr user {event.public_key} at"
                         f" {date_str}"
                     )
-                    asyncio.run(
-                        receive(
-                            wallet,
-                            tokenObj,
-                        )
-                    )
+                    asyncio.run(receive(wallet, tokenObj))
                     logger.trace(
                         "Nostr: setting last check timestamp to"
                         f" {event.created_at} ({date_str})"


@@ -106,6 +106,11 @@ class WalletProofs(SupportsDb, SupportsKeysets):
         Returns:
             str: Serialized Cashu token
         """
+        # DEPRECATED: legacy token for base64 keysets
+        try:
+            _ = [bytes.fromhex(p.id) for p in proofs]
+        except ValueError:
+            legacy = True
 
         if legacy:
             tokenv3 = await self._make_tokenv3(proofs, memo)
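A rough sketch of the detection rule this hunk adds, pulled out as a standalone helper (the function is illustrative; the wallet inlines the same `bytes.fromhex` check to fall back to the legacy `cashuA`/TokenV3 serialization for base64 keysets):

```python
from typing import List

from cashu.core.base import Proof


def uses_legacy_keysets(proofs: List[Proof]) -> bool:
    """True if any proof carries a non-hex (base64) keyset ID."""
    try:
        _ = [bytes.fromhex(p.id) for p in proofs]
    except ValueError:
        return True
    return False
```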
@@ -127,23 +132,27 @@ class WalletProofs(SupportsDb, SupportsKeysets):
         Returns:
             TokenV3: TokenV3 object
         """
+        # extract all keysets IDs from proofs
+        keyset_ids = self._get_proofs_keyset_ids(proofs)
+        keysets = {k.id: k for k in self.keysets.values() if k.id in keyset_ids}
+        assert (
+            len(set([k.unit for k in keysets.values()])) == 1
+        ), "All keysets must have the same unit"
+        unit = keysets[list(keysets.keys())[0]].unit
+
         token = TokenV3()
+        token.memo = memo
+        token.unit = unit.name
+        assert token.memo == memo, f"Memo not set correctly: {token.memo}"
 
-        # we create a map from mint url to keyset id and then group
-        # all proofs with their mint url to build a tokenv3
-
-        # extract all keysets from proofs
-        keysets = self._get_proofs_keyset_ids(proofs)
         # get all mint URLs for all unique keysets from db
-        mint_urls = await self._get_keyset_urls(keysets)
+        mint_urls = await self._get_keyset_urls(list(keysets.keys()))
 
         # append all url-grouped proofs to token
         for url, ids in mint_urls.items():
             mint_proofs = [p for p in proofs if p.id in ids]
             token.token.append(TokenV3Token(mint=url, proofs=mint_proofs))
-        if memo:
-            token.memo = memo
 
         return token
 
     async def _make_tokenv4(


@@ -120,6 +120,7 @@ class Wallet(
         name: str = "no_name",
         skip_db_read: bool = False,
         unit: str = "sat",
+        load_all_keysets: bool = False,
     ):
         """Initializes a wallet with a database and initializes the private key.
@@ -130,6 +131,9 @@ class Wallet(
             skip_db_read (bool, optional): If true, values from db like private key and
                 keysets are not loaded. Useful for running only migrations and returning.
                 Defaults to False.
+            unit (str, optional): Unit of the wallet. Defaults to "sat".
+            load_all_keysets (bool, optional): If true, all keysets are loaded from the database.
+                Defaults to False.
 
         Returns:
             Wallet: Initialized wallet.
@@ -137,16 +141,23 @@ class Wallet(
         logger.trace(f"Initializing wallet with database: {db}")
         self = cls(url=url, db=db, name=name, unit=unit)
         await self._migrate_database()
-        if not skip_db_read:
-            logger.trace("Mint init: loading private key and keysets from db.")
-            await self._init_private_key()
-            keysets_list = await get_keysets(mint_url=url, db=self.db)
-            keysets_active_unit = [k for k in keysets_list if k.unit == self.unit]
-            self.keysets = {k.id: k for k in keysets_active_unit}
-            logger.debug(
-                f"Loaded keysets: {' '.join([k.id + f' {k.unit}' for k in keysets_active_unit])}"
-            )
+
+        if skip_db_read:
+            return self
+
+        logger.trace("Mint init: loading private key and keysets from db.")
+        await self._init_private_key()
+        keysets_list = await get_keysets(
+            mint_url=url if not load_all_keysets else None, db=self.db
+        )
+        if not load_all_keysets:
+            keysets_active_unit = [k for k in keysets_list if k.unit == self.unit]
+            self.keysets = {k.id: k for k in keysets_active_unit}
+        else:
+            self.keysets = {k.id: k for k in keysets_list}
+        logger.debug(
+            f"Loaded keysets: {' '.join([i + f' {k.unit}' for i, k in self.keysets.items()])}"
+        )
         return self
 
     async def _migrate_database(self):
@@ -173,7 +184,6 @@ class Wallet(
         logger.trace("Loading mint keysets.")
         mint_keysets_resp = await self._get_keysets()
         mint_keysets_dict = {k.id: k for k in mint_keysets_resp}
-
         # load all keysets of thisd mint from the db
         keysets_in_db = await get_keysets(mint_url=self.url, db=self.db)
@@ -274,7 +284,7 @@ class Wallet(
             logger.debug(f"Could not load mint info: {e}")
             pass
 
-    async def load_proofs(self, reload: bool = False) -> None:
+    async def load_proofs(self, reload: bool = False, all_keysets=False) -> None:
         """Load all proofs of the selected mint and unit (i.e. self.keysets) into memory."""
 
         if self.proofs and not reload:
@@ -284,6 +294,10 @@ class Wallet(
         self.proofs = []
         await self.load_keysets_from_db()
         async with self.db.connect() as conn:
+            if all_keysets:
+                proofs = await get_proofs(db=self.db, conn=conn)
+                self.proofs.extend(proofs)
+            else:
                 for keyset_id in self.keysets:
                     proofs = await get_proofs(db=self.db, id=keyset_id, conn=conn)
                     self.proofs.extend(proofs)
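For completeness, a sketch of how the new `load_all_keysets` flag is meant to be used, mirroring the `Wallet.with_db(...)` calls added in the CLI hunks above (the URL, db path, and import path are placeholders/assumptions, not taken from this diff):

```python
from cashu.wallet.wallet import Wallet  # assumed import path


async def open_wallet_with_all_keysets() -> Wallet:
    # Load keysets of every mint and unit from the database, not just the active one.
    wallet = await Wallet.with_db(
        url="https://mint.example.com",  # placeholder mint URL
        db="~/.cashu/wallet",            # placeholder db location
        name="wallet",
        unit="sat",
        load_all_keysets=True,
    )
    # all_keysets=True loads proofs for every keyset in the db, not only self.keysets.
    await wallet.load_proofs(reload=True, all_keysets=True)
    return wallet
```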


@@ -3,6 +3,7 @@ import pytest
 from cashu.core.base import TokenV3, TokenV4, Unit
 from cashu.core.helpers import calculate_number_of_blank_outputs
 from cashu.core.split import amount_split
+from cashu.wallet.helpers import deserialize_token_from_string
 
 
 def test_get_output_split():
@@ -10,98 +11,33 @@ def test_get_output_split():
 
 def test_tokenv3_deserialize_get_attributes():
-    token_str = (
-        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIkplaFpMVTZuQ3BSZCIsICJhbW91bnQiOiAyLCAic2VjcmV0IjogIjBFN2lDazRkVmxSZjVQRjFnNFpWMnci"
-        "LCAiQyI6ICIwM2FiNTgwYWQ5NTc3OGVkNTI5NmY4YmVlNjU1ZGJkN2Q2NDJmNWQzMmRlOGUyNDg0NzdlMGI0ZDZhYTg2M2ZjZDUifSwgeyJpZCI6ICJKZWhaTFU2bkNwUmQiLCAiYW"
-        "1vdW50IjogOCwgInNlY3JldCI6ICJzNklwZXh3SGNxcXVLZDZYbW9qTDJnIiwgIkMiOiAiMDIyZDAwNGY5ZWMxNmE1OGFkOTAxNGMyNTliNmQ2MTRlZDM2ODgyOWYwMmMzODc3M2M0"
-        "NzIyMWY0OTYxY2UzZjIzIn1dLCAibWludCI6ICJodHRwOi8vbG9jYWxob3N0OjMzMzgifV19"
-    )
+    token_str = "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjAwYWQyNjhjNGQxZjU4MjYiLCAiYW1vdW50IjogOCwgInNlY3JldCI6ICJjNTA5YzM4MmM2NjJkYWJiYjRkMGM1ZjllYTI1NjAwZTNhYjViMTIzYWNlNmNiNzljYTM1OWE4NTQwOGZlY2I3IiwgIkMiOiAiMDMwZTNkNDdkM2NlMjNkZTkzNTM3MjQ1NGJjOTMxMTJjZmExN2VmYWNkYjZjNWM2NDNmODVjOGFmM2JlNWQwMWEwIn0sIHsiaWQiOiAiMDBhZDI2OGM0ZDFmNTgyNiIsICJhbW91bnQiOiAyLCAic2VjcmV0IjogIjgxYjhiYjFhN2Q2MGQwZGZiMjkxNmZjZmU4NzUxZmRhZGJjZTU2NDZmMmEyYTQzY2FkMDY4YjUzNzJlN2M5NGMiLCAiQyI6ICIwMzUxN2E0OGYxMmU0NWQ0YzU4ZGUyMTZhNDNjYzgxNDMwMjMxY2YyYjA4OWQzMjY3MDlkMGYyZDAwYjc0N2VmYzcifV0sICJtaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6MzMzOCJ9XSwgInVuaXQiOiAic2F0In0="
     token = TokenV3.deserialize(token_str)
-    assert token.get_amount() == 10
-    assert len(token.get_proofs()) == 2
+    assert token.amount == 10
+    assert len(token.proofs) == 2
 
 
 def test_tokenv3_deserialize_serialize():
-    token_str = (
-        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIkplaFpMVTZuQ3BSZCIsICJh"
-        "bW91bnQiOiAyLCAic2VjcmV0IjogIjBFN2lDazRkVmxSZjVQRjFnNFpWMnci"
-        "LCAiQyI6ICIwM2FiNTgwYWQ5NTc3OGVkNTI5NmY4YmVlNjU1ZGJkN2Q2NDJmNWQzMmRlOG"
-        "UyNDg0NzdlMGI0ZDZhYTg2M2ZjZDUifSwgeyJpZCI6ICJKZWhaTFU2bkNwUmQiLCAiYW"
-        "1vdW50IjogOCwgInNlY3JldCI6ICJzNklwZXh3SGNxcXVLZDZYbW9qTDJnIiwgIkMiOiAiM"
-        "DIyZDAwNGY5ZWMxNmE1OGFkOTAxNGMyNTliNmQ2MTRlZDM2ODgyOWYwMmMzODc3M2M0"
-        "NzIyMWY0OTYxY2UzZjIzIn1dLCAibWludCI6ICJodHRwOi8vbG9jYWxob3N0OjMzMzgifV19"
-    )
+    token_str = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6ImVmNTRkOTg2NDQxNjA1MjY3YzZhNmU3MzJmZWZlMWRhNzViNWU5ZmY3MzZkODQxNmYwYmE4MmM4OTNlMWUyYWUiLCJDIjoiMDI2OTQ4YWFlY2FiZjJlZGVjYWU1M2YzYWIyMjNkZGFhMTRhNmY4MjJhZWNjZGMxYjAxNmVlODg0NDYwYjBjMTVjIn0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImZkMmZkZWI1NzI0N2QzZTJlNjg4YmEyZDI1OGYzN2U0NjY4ZjI2MGM1MGUzZDBjOWRkNWE5Njk0YjQ1ZmQ4OWMiLCJDIjoiMDNjNjc0NWQ5MjA1NDAzMTk3NzA1YWIyN2M1YzEzNDMwNjdmYmU1MTZhMTM2NDE4M2MzMTBmZDY5MmZkNGQzM2ZjIn1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
     token = TokenV3.deserialize(token_str)
     assert token.serialize() == token_str
 
 
 def test_tokenv3_deserialize_serialize_with_dleq():
-    token_str = (
-        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93M"
-        "SIsICJhbW91bnQiOiAyLCAic2VjcmV0IjogIjZmZjFiY2VlOGUzMzk2NGE4ZDNjNGQ5NzYwNzdiZ"
-        "DI4ZGVkZWJkODYyMDU0MDQzNDY4ZjU5ZDFiZjI1OTQzN2QiLCAiQyI6ICIwM2I3ZD"
-        "lkMzIzYTAxOWJlNTE4NzRlOGE5OGY1NDViOTg3Y2JmNmU5MWUwMDc1YTFhZjQ3MjY2NDMxOGRlZ"
-        "TQzZTUiLCAiZGxlcSI6IHsiZSI6ICI1ZjkxMGQ4NTc0M2U0OTI0ZjRiNjlkNzhjM"
-        "jFjYTc1ZjEzNzg3Zjc3OTE1NWRmMjMzMjJmYTA1YjU5ODdhYzNmIiwgInMiOiAiZTc4Y2U0MzNiZ"
-        "WNlZTNjNGU1NzM4ZDdjMzRlNDQyZWQ0MmJkMzk0MjI0ZTc3MjE4OGFjMmI5MzZmM"
-        "jA2Y2QxYSIsICJyIjogIjI3MzM3ODNmOTQ4MWZlYzAxNzdlYmM4ZjBhOTI2OWVjOGFkNzU5MDU2ZT"
-        "k3MTRiMWEwYTEwMDQ3MmY2Y2Y5YzIifX0sIHsiaWQiOiAiMWNDTklBWjJYL3cxIi"
-        "wgImFtb3VudCI6IDgsICJzZWNyZXQiOiAiMmFkNDMyZDRkNTg2MzJiMmRlMzI0ZmQxYmE5OTcyZmE"
-        "4MDljNmU3ZGE1ZTkyZWVmYjBiNjYxMmQ5M2Q3ZTAwMCIsICJDIjogIjAzMmFmYjg"
-        "zOWQwMmRmMWNhOGY5ZGZjNTI1NzUxN2Q0MzY4YjdiMTc0MzgzM2JlYWUzZDQzNmExYmQwYmJkYjVk"
-        "OCIsICJkbGVxIjogeyJlIjogImY0NjM2MzU5YTUzZGQxNGEyNmUyNTMyMDQxZWIx"
-        "MDE2OTk1ZTg4NzgwODY0OWFlY2VlNTcwZTA5ZTk2NTU3YzIiLCAicyI6ICJmZWYzMGIzMDcwMDJkMW"
-        "VjNWZiZjg0ZGZhZmRkMGEwOTdkNDJlMDYxNTZiNzdiMTMzMmNjNGZjNGNjYWEyOD"
-        "JmIiwgInIiOiAiODQ5MjQxNzBlYzc3ZjhjMDNmZDRlZTkyZTA3MjdlMzYyNTliZjRhYTc4NTBjZTc2"
-        "NDExMDQ0MmNlNmVlM2FjYyJ9fV0sICJtaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6MzMzOCJ9XX0="
-    )
+    token_str = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6IjI4MDliZjk5YTgzOGJkNzU1NjAwNTVmMjFlNzZiNzYwOTEwMGE1M2FhMzJiNDUwMDRiOGEzMzU0NDgwN2Q4N2MiLCJDIjoiMDJhZWFmNmE5OGY1MjQzZGY4YTA0OGIzNzYzMjUzNjYxZTgxMjFkODhlZTIzNGZmM2ZmYjQ2ZWM0YWIyOWIyYmJlIiwiZGxlcSI6eyJlIjoiNThlOGYzYTVkOGE2M2M1NGJkMjM5YzE4ZWJkMWUxZWFiZmJkZWMyMzhkNDBjZWExOGJjOWJmY2M0NjIyNGRjYyIsInMiOiIwNGYwM2FkMTA3MTE4NGQzZWIyOTNlYjRhMWI3MGY1OTQ0Mjg1NmJhYzNmNWJjZDE2OWJkMmVhOGVkNmY0NjlhIiwiciI6IjExZDUwMjRhM2U3N2Q0MzNhN2VjMTgwOGE5NzgzNGY2MzlhYjVkYjZhNjZhNmQzYWZlM2M4NGUyNmEzZWM3MDcifX0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImY1OGNhNmMwYTA3YWI2YjFmMWUzYjIzYWU3MDc3ODE5MzNiMGExNTExYWYzZWQyMjRmY2FjNzgxM2NhZTQ5OTYiLCJDIjoiMDNiZDVhMWFmN2NkMTY0MjA2MmU5NjRlYTZjOTlhZGRiNWI1YjRhYmY5Y2VmMjhjZWRhYmJhNGFlM2QyYmUyOGRmIiwiZGxlcSI6eyJlIjoiMmQzNTc2YzU3ZTM5ZjFiMzAzN2RmYjdhYmViOTE2M2I2ZGIxMjExMTBjNTZiY2NkYzhmMTcyN2MzZTg4NjQyNyIsInMiOiIzNDg2M2MxNDU5ZmI5MTk4ZjNhNjAyYzZhMWRkYmExNzc0NWUzN2M2ZGNiMjNiMmQxMmU3NGM3YzE3MjZiOWYwIiwiciI6IjYxNzRlNjBiZjU4MGYyOTBiNTIwZjMxYzc5MjBlNTA3MDkxNmJmYzRmZTc0MDNhNjY5ZThlNmEzOThhNGQ3YTcifX1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
     token = TokenV3.deserialize(token_str)
     assert token.serialize(include_dleq=True) == token_str
 
 
 def test_tokenv3_deserialize_serialize_no_dleq():
-    token_str = (
-        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93MSIsICJhb"
-        "W91bnQiOiAyLCAic2VjcmV0IjogIjZmZjFiY2VlOGUzMzk2NGE4ZDNjNGQ5NzYwNzdiZ"
-        "DI4ZGVkZWJkODYyMDU0MDQzNDY4ZjU5ZDFiZjI1OTQzN2QiLCAiQyI6ICIwM2I3ZDlkMzIzY"
-        "TAxOWJlNTE4NzRlOGE5OGY1NDViOTg3Y2JmNmU5MWUwMDc1YTFhZjQ3MjY2NDMxOGRlZ"
-        "TQzZTUiLCAiZGxlcSI6IHsiZSI6ICI1ZjkxMGQ4NTc0M2U0OTI0ZjRiNjlkNzhjMjFjYTc1Z"
-        "jEzNzg3Zjc3OTE1NWRmMjMzMjJmYTA1YjU5ODdhYzNmIiwgInMiOiAiZTc4Y2U0MzNiZ"
-        "WNlZTNjNGU1NzM4ZDdjMzRlNDQyZWQ0MmJkMzk0MjI0ZTc3MjE4OGFjMmI5MzZmMjA2Y2QxY"
-        "SIsICJyIjogIjI3MzM3ODNmOTQ4MWZlYzAxNzdlYmM4ZjBhOTI2OWVjOGFkNzU5MDU2ZT"
-        "k3MTRiMWEwYTEwMDQ3MmY2Y2Y5YzIifX0sIHsiaWQiOiAiMWNDTklBWjJYL3cxIiwgImFtb3"
-        "VudCI6IDgsICJzZWNyZXQiOiAiMmFkNDMyZDRkNTg2MzJiMmRlMzI0ZmQxYmE5OTcyZmE"
-        "4MDljNmU3ZGE1ZTkyZWVmYjBiNjYxMmQ5M2Q3ZTAwMCIsICJDIjogIjAzMmFmYjgzOWQwMmR"
-        "mMWNhOGY5ZGZjNTI1NzUxN2Q0MzY4YjdiMTc0MzgzM2JlYWUzZDQzNmExYmQwYmJkYjVk"
-        "OCIsICJkbGVxIjogeyJlIjogImY0NjM2MzU5YTUzZGQxNGEyNmUyNTMyMDQxZWIxMDE2OTk1"
-        "ZTg4NzgwODY0OWFlY2VlNTcwZTA5ZTk2NTU3YzIiLCAicyI6ICJmZWYzMGIzMDcwMDJkMW"
-        "VjNWZiZjg0ZGZhZmRkMGEwOTdkNDJlMDYxNTZiNzdiMTMzMmNjNGZjNGNjYWEyODJmIiwgIn"
-        "IiOiAiODQ5MjQxNzBlYzc3ZjhjMDNmZDRlZTkyZTA3MjdlMzYyNTliZjRhYTc4NTBjZTc2"
-        "NDExMDQ0MmNlNmVlM2FjYyJ9fV0sICJtaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6MzMzOCJ9XX0="
-    )
-    token_str_no_dleq = (
-        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93MSIsICJhbW91bn"
-        "QiOiAyLCAic2VjcmV0IjogIjZmZjFiY2VlOGUzMzk2NGE4ZDNjNGQ5NzYwNzdiZDI4"
-        "ZGVkZWJkODYyMDU0MDQzNDY4ZjU5ZDFiZjI1OTQzN2QiLCAiQyI6ICIwM2I3ZDlkMzIzYTAxOWJlN"
-        "TE4NzRlOGE5OGY1NDViOTg3Y2JmNmU5MWUwMDc1YTFhZjQ3MjY2NDMxOGRlZTQzZTU"
-        "ifSwgeyJpZCI6ICIxY0NOSUFaMlgvdzEiLCAiYW1vdW50IjogOCwgInNlY3JldCI6ICIyYWQ0MzJkN"
-        "GQ1ODYzMmIyZGUzMjRmZDFiYTk5NzJmYTgwOWM2ZTdkYTVlOTJlZWZiMGI2NjEyZD"
-        "kzZDdlMDAwIiwgIkMiOiAiMDMyYWZiODM5ZDAyZGYxY2E4ZjlkZmM1MjU3NTE3ZDQzNjhiN2IxNzQz"
-        "ODMzYmVhZTNkNDM2YTFiZDBiYmRiNWQ4In1dLCAibWludCI6ICJodHRwOi8vbG9jY"
-        "Wxob3N0OjMzMzgifV19"
-    )
+    token_str = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6IjI4MDliZjk5YTgzOGJkNzU1NjAwNTVmMjFlNzZiNzYwOTEwMGE1M2FhMzJiNDUwMDRiOGEzMzU0NDgwN2Q4N2MiLCJDIjoiMDJhZWFmNmE5OGY1MjQzZGY4YTA0OGIzNzYzMjUzNjYxZTgxMjFkODhlZTIzNGZmM2ZmYjQ2ZWM0YWIyOWIyYmJlIiwiZGxlcSI6eyJlIjoiNThlOGYzYTVkOGE2M2M1NGJkMjM5YzE4ZWJkMWUxZWFiZmJkZWMyMzhkNDBjZWExOGJjOWJmY2M0NjIyNGRjYyIsInMiOiIwNGYwM2FkMTA3MTE4NGQzZWIyOTNlYjRhMWI3MGY1OTQ0Mjg1NmJhYzNmNWJjZDE2OWJkMmVhOGVkNmY0NjlhIiwiciI6IjExZDUwMjRhM2U3N2Q0MzNhN2VjMTgwOGE5NzgzNGY2MzlhYjVkYjZhNjZhNmQzYWZlM2M4NGUyNmEzZWM3MDcifX0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImY1OGNhNmMwYTA3YWI2YjFmMWUzYjIzYWU3MDc3ODE5MzNiMGExNTExYWYzZWQyMjRmY2FjNzgxM2NhZTQ5OTYiLCJDIjoiMDNiZDVhMWFmN2NkMTY0MjA2MmU5NjRlYTZjOTlhZGRiNWI1YjRhYmY5Y2VmMjhjZWRhYmJhNGFlM2QyYmUyOGRmIiwiZGxlcSI6eyJlIjoiMmQzNTc2YzU3ZTM5ZjFiMzAzN2RmYjdhYmViOTE2M2I2ZGIxMjExMTBjNTZiY2NkYzhmMTcyN2MzZTg4NjQyNyIsInMiOiIzNDg2M2MxNDU5ZmI5MTk4ZjNhNjAyYzZhMWRkYmExNzc0NWUzN2M2ZGNiMjNiMmQxMmU3NGM3YzE3MjZiOWYwIiwiciI6IjYxNzRlNjBiZjU4MGYyOTBiNTIwZjMxYzc5MjBlNTA3MDkxNmJmYzRmZTc0MDNhNjY5ZThlNmEzOThhNGQ3YTcifX1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
+    token_str_no_dleq = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6IjI4MDliZjk5YTgzOGJkNzU1NjAwNTVmMjFlNzZiNzYwOTEwMGE1M2FhMzJiNDUwMDRiOGEzMzU0NDgwN2Q4N2MiLCJDIjoiMDJhZWFmNmE5OGY1MjQzZGY4YTA0OGIzNzYzMjUzNjYxZTgxMjFkODhlZTIzNGZmM2ZmYjQ2ZWM0YWIyOWIyYmJlIn0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImY1OGNhNmMwYTA3YWI2YjFmMWUzYjIzYWU3MDc3ODE5MzNiMGExNTExYWYzZWQyMjRmY2FjNzgxM2NhZTQ5OTYiLCJDIjoiMDNiZDVhMWFmN2NkMTY0MjA2MmU5NjRlYTZjOTlhZGRiNWI1YjRhYmY5Y2VmMjhjZWRhYmJhNGFlM2QyYmUyOGRmIn1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
     token = TokenV3.deserialize(token_str)
     assert token.serialize(include_dleq=False) == token_str_no_dleq
 
 
 def test_tokenv3_deserialize_with_memo():
-    token_str = (
-        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIkplaFpMVTZuQ3BSZCIsICJhbW91bnQiOiAyLCAic2VjcmV0IjogIjBFN2lDazRkVmxSZjV"
-        "QRjFnNFpWMnciLCAiQyI6ICIwM2FiNTgwYWQ5NTc3OGVkNTI5NmY4YmVlNjU1ZGJkN2Q2NDJmNWQzMmRlOGUyNDg0NzdlMGI0ZDZhYTg2M2ZjZDUifSwg"
-        "eyJpZCI6ICJKZWhaTFU2bkNwUmQiLCAiYW1vdW50IjogOCwgInNlY3JldCI6ICJzNklwZXh3SGNxcXVLZDZYbW9qTDJnIiwgIkMiOiAiMDIyZDAwNGY5Z"
-        "WMxNmE1OGFkOTAxNGMyNTliNmQ2MTRlZDM2ODgyOWYwMmMzODc3M2M0NzIyMWY0OTYxY2UzZjIzIn1dLCAibWludCI6ICJodHRwOi8vbG9jYWxob3N0Oj"
-        "MzMzgifV0sICJtZW1vIjogIlRlc3QgbWVtbyJ9"
-    )
+    token_str = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6IjNlNDlhMGQzNzllMWQ1YTY3MjhiYzUwMjM4YTRjZDFlMjBiY2M5MjM4MjAxMDg0MzcyNjdhNWZkZDM2NWZiMDYiLCJDIjoiMDIyYWQwODg5ZmVkNWE0YWNjODEwYTZhZTk4MTc0YjFlZGM2OTkwMWI0OTdkNTYzYmM5NjEyMjVlYzMwOGVkMTVkIn0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImNmNjhhNTQ3ZWY2ZDVhNGFkZTI0ZGM5MDU5ZTE5ZmJkZDU0NmQ5MGE1OWI0ODE5MzdmN2FjNmRiNWMwZjFkMTUiLCJDIjoiMDMyZWQ5ZGQ3MzExMTg1ODk1NTFiM2E5YjJhNTM5YWZlYTcxOTU3OGZhNTI1ZTVmMmJkY2M4YjNlMzhjNjJkOTRjIn1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJtZW1vIjoiVGVzdCBtZW1vIiwidW5pdCI6InNhdCJ9"
     token = TokenV3.deserialize(token_str)
     assert token.serialize() == token_str
     assert token.memo == "Test memo"
@@ -114,35 +50,38 @@ def test_tokenv3_serialize_example_token_nut00():
                 "mint": "https://8333.space:3338",
                 "proofs": [
                     {
-                        "id": "9bb9d58392cd823e",
                         "amount": 2,
-                        "secret": "EhpennC9qB3iFlW8FZ_pZw",
-                        "C": "02c020067db727d586bc3183aecf97fcb800c3f4cc4759f69c626c9db5d8f5b5d4",
+                        "id": "009a1f293253e41e",
+                        "secret": "407915bc212be61a77e3e6d2aeb4c727980bda51cd06a6afc29e2861768a7837",
+                        "C": "02bc9097997d81afb2cc7346b5e4345a9346bd2a506eb7958598a72f0cf85163ea",
                     },
                     {
-                        "id": "9bb9d58392cd823e",
                         "amount": 8,
-                        "secret": "TmS6Cv0YT5PU_5ATVKnukw",
-                        "C": "02ac910bef28cbe5d7325415d5c263026f15f9b967a079ca9779ab6e5c2db133a7",
+                        "id": "009a1f293253e41e",
+                        "secret": "fe15109314e61d7756b0f8ee0f23a624acaa3f4e042f61433c728c7057b931be",
+                        "C": "029e8e5050b890a7d6c0968db16bc1d5d5fa040ea1de284f6ec69d61299f671059",
                    },
                 ],
             }
         ],
+        "unit": "sat",
         "memo": "Thank you.",
     }
     tokenObj = TokenV3.parse_obj(token_dict)
+    # NOTE: The serialized token here is different from the example in NUT-00 because the order of keys in the JSON is different in our seiralization
+    encoded_token = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwOWExZjI5MzI1M2U0MWUiLCJhbW91bnQiOjIsInNlY3JldCI6IjQwNzkxNWJjMjEyYmU2MWE3N2UzZTZkMmFlYjRjNzI3OTgwYmRhNTFjZDA2YTZhZmMyOWUyODYxNzY4YTc4MzciLCJDIjoiMDJiYzkwOTc5OTdkODFhZmIyY2M3MzQ2YjVlNDM0NWE5MzQ2YmQyYTUwNmViNzk1ODU5OGE3MmYwY2Y4NTE2M2VhIn0seyJpZCI6IjAwOWExZjI5MzI1M2U0MWUiLCJhbW91bnQiOjgsInNlY3JldCI6ImZlMTUxMDkzMTRlNjFkNzc1NmIwZjhlZTBmMjNhNjI0YWNhYTNmNGUwNDJmNjE0MzNjNzI4YzcwNTdiOTMxYmUiLCJDIjoiMDI5ZThlNTA1MGI4OTBhN2Q2YzA5NjhkYjE2YmMxZDVkNWZhMDQwZWExZGUyODRmNmVjNjlkNjEyOTlmNjcxMDU5In1dLCJtaW50IjoiaHR0cHM6Ly84MzMzLnNwYWNlOjMzMzgifV0sIm1lbW8iOiJUaGFuayB5b3UuIiwidW5pdCI6InNhdCJ9"
+    encoded_token_nut00 = "cashuAeyJ0b2tlbiI6W3sibWludCI6Imh0dHBzOi8vODMzMy5zcGFjZTozMzM4IiwicHJvb2ZzIjpbeyJhbW91bnQiOjIsImlkIjoiMDA5YTFmMjkzMjUzZTQxZSIsInNlY3JldCI6IjQwNzkxNWJjMjEyYmU2MWE3N2UzZTZkMmFlYjRjNzI3OTgwYmRhNTFjZDA2YTZhZmMyOWUyODYxNzY4YTc4MzciLCJDIjoiMDJiYzkwOTc5OTdkODFhZmIyY2M3MzQ2YjVlNDM0NWE5MzQ2YmQyYTUwNmViNzk1ODU5OGE3MmYwY2Y4NTE2M2VhIn0seyJhbW91bnQiOjgsImlkIjoiMDA5YTFmMjkzMjUzZTQxZSIsInNlY3JldCI6ImZlMTUxMDkzMTRlNjFkNzc1NmIwZjhlZTBmMjNhNjI0YWNhYTNmNGUwNDJmNjE0MzNjNzI4YzcwNTdiOTMxYmUiLCJDIjoiMDI5ZThlNTA1MGI4OTBhN2Q2YzA5NjhkYjE2YmMxZDVkNWZhMDQwZWExZGUyODRmNmVjNjlkNjEyOTlmNjcxMDU5In1dfV0sInVuaXQiOiJzYXQiLCJtZW1vIjoiVGhhbmsgeW91LiJ9"
     assert (
-        tokenObj.serialize()
-        == "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjliYjlkNTgzOTJjZDg"
-        "yM2UiLCAiYW1vdW50IjogMiwgInNlY3JldCI6ICJFaHBlbm5DOXFCM2lGbFc4Rlpf"
-        "cFp3IiwgIkMiOiAiMDJjMDIwMDY3ZGI3MjdkNTg2YmMzMTgzYWVjZjk3ZmNiODAwY"
-        "zNmNGNjNDc1OWY2OWM2MjZjOWRiNWQ4ZjViNWQ0In0sIHsiaWQiOiAiOWJiOWQ1OD"
-        "M5MmNkODIzZSIsICJhbW91bnQiOiA4LCAic2VjcmV0IjogIlRtUzZDdjBZVDVQVV8"
-        "1QVRWS251a3ciLCAiQyI6ICIwMmFjOTEwYmVmMjhjYmU1ZDczMjU0MTVkNWMyNjMw"
-        "MjZmMTVmOWI5NjdhMDc5Y2E5Nzc5YWI2ZTVjMmRiMTMzYTcifV0sICJtaW50IjogI"
-        "mh0dHBzOi8vODMzMy5zcGFjZTozMzM4In1dLCAibWVtbyI6ICJUaGFuayB5b3UuIn0="
+        tokenObj.serialize() == encoded_token
+        # NUT-00 example:
+        # == encoded_token_nut00
     )
+    # to make sure the serialization is correct, we deserialize the token and compare it with the original token
+    token_1 = TokenV3.deserialize(encoded_token)
+    token_2 = TokenV3.deserialize(encoded_token_nut00)
+    assert token_1.serialize() == token_2.serialize()
 
 
 def test_tokenv4_deserialize_get_attributes():
     token_str = "cashuBo2F0gaJhaUgArSaMTR9YJmFwgqNhYQJhc3hAMDZlM2UzZjY4NDRiOGZkOGQ3NDMwODY1MjY3MjQ5YWU3NjdhMzg5MDBjODdkNGE0ZDMxOGY4MTJmNzkzN2ZiMmFjWCEDXDG_wzG35Lu4vcAtiycLSQlNqH65afih9N2SrFJn3GCjYWEIYXN4QDBmNTE5YjgwOWZlNmQ5MzZkMjVhYmU1YjhjYTZhMDRlNDc3OTJjOTI0YTkwZWRmYjU1MmM1ZjkzODJkNzFjMDJhY1ghA4CNH8dD8NNt715E37Ar65X6p6uBUoDbe8JipQp81TIgYW11aHR0cDovL2xvY2FsaG9zdDozMzM4YXVjc2F0"
@@ -273,3 +212,39 @@ def test_calculate_number_of_blank_outputs_fails_for_negative_fee_reserve():
     fee_reserve_sat = -1
     with pytest.raises(AssertionError):
         _ = calculate_number_of_blank_outputs(fee_reserve_sat)
+
+
+def test_parse_token_v3_v4_base64_keyset_id():
+    token_dict = {
+        "token": [
+            {
+                "mint": "https://localhost:3338",
+                "proofs": [
+                    {
+                        "amount": 2,
+                        "id": "009a1f293253e41e",
+                        "secret": "407915bc212be61a77e3e6d2aeb4c727980bda51cd06a6afc29e2861768a7837",
+                        "C": "02bc9097997d81afb2cc7346b5e4345a9346bd2a506eb7958598a72f0cf85163ea",
+                    },
+                ],
+            }
+        ],
+    }
+    token_v3 = TokenV3.parse_obj(token_dict)
+    token_v3_serialized = token_v3.serialize()
+    # this token can be serialized to V4
+    token = deserialize_token_from_string(token_v3_serialized)
+    assert isinstance(token, TokenV4)
+
+    # Now let's do the same with a base64 token
+    token_dict_base64_keyset = token_dict.copy()
+    token_dict_base64_keyset["token"][0]["proofs"][0]["id"] = "yjzQhxghPdrr"  # type: ignore
+    token_v3_base64_keyset = TokenV3.parse_obj(token_dict_base64_keyset)
+    token_v3_base64_keyset_serialized = token_v3_base64_keyset.serialize()
+    # this token can not be serialized to V4
+    token = deserialize_token_from_string(token_v3_base64_keyset_serialized)
+    assert isinstance(token, TokenV3)