TokenV4 CBOR serialization (#502)

* WIP: cashuB with CBOR

* working

* tokenv4 works

* fix mypy
This commit is contained in:
callebtc
2024-06-30 14:36:44 +02:00
committed by GitHub
parent 1d8b5cd5ca
commit 2739c3127a
12 changed files with 579 additions and 416 deletions

View File

@@ -4,8 +4,9 @@ import math
from dataclasses import dataclass from dataclasses import dataclass
from enum import Enum from enum import Enum
from sqlite3 import Row from sqlite3 import Row
from typing import Dict, List, Optional, Union from typing import Any, Dict, List, Optional, Union
import cbor2
from loguru import logger from loguru import logger
from pydantic import BaseModel, root_validator from pydantic import BaseModel, root_validator
@@ -747,43 +748,6 @@ class MintKeyset:
# ------- TOKEN ------- # ------- TOKEN -------
class TokenV1(BaseModel):
    """Legacy Cashu token carrying bare proofs.

    Contains no mint information, so it can only be redeemed if the
    receiver already knows which mint the keyset ids of the proofs
    belong to.
    """

    # Pydantic custom-root type: the token body is just a list of proofs.
    # NOTE: not used in Pydantic validation
    __root__: List[Proof]
class TokenV2Mint(BaseModel):
    """Mint reachability information attached to a TokenV2.

    Maps one mint URL to the keyset ids that originate from that mint.
    """

    url: str  # mint URL
    ids: List[str]  # keyset ids issued by this mint
class TokenV2(BaseModel):
    """Cashu token holding proofs plus optional mint information.

    May combine proofs from several different mints and keysets; the
    ``mints`` field tells the receiver how to reach each of them.
    """

    proofs: List[Proof]
    mints: Optional[List[TokenV2Mint]] = None

    def to_dict(self):
        """Return a JSON-serializable dict; ``mints`` is omitted when unset."""
        result = {"proofs": [p.to_dict() for p in self.proofs]}
        if self.mints:
            result["mints"] = [m.dict() for m in self.mints]
        return result
class TokenV3Token(BaseModel): class TokenV3Token(BaseModel):
mint: Optional[str] = None mint: Optional[str] = None
proofs: List[Proof] proofs: List[Proof]
@@ -804,14 +768,6 @@ class TokenV3(BaseModel):
memo: Optional[str] = None memo: Optional[str] = None
unit: Optional[str] = None unit: Optional[str] = None
def to_dict(self, include_dleq=False):
    """Serialize the token to a dict, adding memo/unit only when set."""
    as_dict = {"token": [t.to_dict(include_dleq) for t in self.token]}
    if self.memo:
        as_dict["memo"] = self.memo  # type: ignore
    if self.unit:
        as_dict["unit"] = self.unit  # type: ignore
    return as_dict
def get_proofs(self): def get_proofs(self):
return [proof for token in self.token for proof in token.proofs] return [proof for token in self.token for proof in token.proofs]
@@ -824,6 +780,14 @@ class TokenV3(BaseModel):
def get_mints(self): def get_mints(self):
return list(set([t.mint for t in self.token if t.mint])) return list(set([t.mint for t in self.token if t.mint]))
def serialize_to_dict(self, include_dleq=False):
    """Build the dict representation used when serializing a TokenV3."""
    out = {"token": [t.to_dict(include_dleq) for t in self.token]}
    if self.memo:
        out["memo"] = self.memo  # type: ignore
    if self.unit:
        out["unit"] = self.unit  # type: ignore
    return out
@classmethod @classmethod
def deserialize(cls, tokenv3_serialized: str) -> "TokenV3": def deserialize(cls, tokenv3_serialized: str) -> "TokenV3":
""" """
@@ -848,6 +812,230 @@ class TokenV3(BaseModel):
tokenv3_serialized = prefix tokenv3_serialized = prefix
# encode the token as a base64 string # encode the token as a base64 string
tokenv3_serialized += base64.urlsafe_b64encode( tokenv3_serialized += base64.urlsafe_b64encode(
json.dumps(self.to_dict(include_dleq)).encode() json.dumps(self.serialize_to_dict(include_dleq)).encode()
).decode() ).decode()
return tokenv3_serialized return tokenv3_serialized
class TokenV4DLEQ(BaseModel):
    """Discrete Log Equality (DLEQ) proof in compact binary form."""

    e: bytes
    s: bytes
    r: bytes
class TokenV4Proof(BaseModel):
    """Value token in the compact, single-letter-keyed V4 representation."""

    a: int  # amount
    s: str  # secret
    c: bytes  # signature
    d: Optional[TokenV4DLEQ] = None  # DLEQ proof
    w: Optional[str] = None  # witness

    @classmethod
    def from_proof(cls, proof: Proof, include_dleq=False):
        """Build a TokenV4Proof from a Proof (hex fields become bytes)."""
        dleq = None
        if proof.dleq:
            dleq = TokenV4DLEQ(
                e=bytes.fromhex(proof.dleq.e),
                s=bytes.fromhex(proof.dleq.s),
                r=bytes.fromhex(proof.dleq.r),
            )
        return cls(
            a=proof.amount,
            s=proof.secret,
            c=bytes.fromhex(proof.C),
            d=dleq,
            w=proof.witness,
        )
class TokenV4Token(BaseModel):
    """Group of TokenV4 proofs that all share one keyset."""

    i: bytes  # keyset id
    p: List[TokenV4Proof]  # proofs under that keyset
class TokenV4(BaseModel):
    """Cashu token V4: compact, CBOR-serializable token for a single mint.

    Field names are single letters to keep the CBOR encoding small:
    ``m`` = mint URL, ``u`` = unit, ``t`` = tokens grouped by keyset,
    ``d`` = optional memo.
    """

    # mint URL
    m: str
    # unit
    u: str
    # tokens, one entry per keyset
    t: List[TokenV4Token]
    # memo
    d: Optional[str] = None

    @property
    def mint(self) -> str:
        return self.m

    @property
    def memo(self) -> Optional[str]:
        return self.d

    @property
    def unit(self) -> str:
        return self.u

    @property
    def amounts(self) -> List[int]:
        """All proof amounts across every keyset group."""
        return [p.a for token in self.t for p in token.p]

    @property
    def amount(self) -> int:
        """Total value of the token."""
        return sum(self.amounts)

    @property
    def proofs(self) -> List[Proof]:
        """Expand the compact proofs back into full Proof objects."""
        return [
            Proof(
                id=token.i.hex(),
                amount=p.a,
                secret=p.s,
                C=p.c.hex(),
                dleq=(
                    DLEQWallet(
                        e=p.d.e.hex(),
                        s=p.d.s.hex(),
                        r=p.d.r.hex(),
                    )
                    if p.d
                    else None
                ),
                witness=p.w,
            )
            for token in self.t
            for p in token.p
        ]

    @classmethod
    def from_tokenv3(cls, tokenv3: TokenV3):
        """Convert a single-mint TokenV3 into a TokenV4.

        Raises:
            Exception: if the TokenV3 contains proofs from more than one mint.
        """
        if not len(tokenv3.get_mints()) == 1:
            raise Exception("TokenV3 must contain proofs from only one mint.")

        # group proofs by keyset id
        proofs_by_id: Dict[str, List[Proof]] = {}
        for proof in tokenv3.get_proofs():
            proofs_by_id.setdefault(proof.id, []).append(proof)

        # BUGFIX: build the token from locals. The previous version assigned
        # cls.t / cls.d / cls.m / cls.u, which mutated shared class state and
        # shadowed the pydantic field declarations on every call.
        tokens: List[TokenV4Token] = []
        for keyset_id, keyset_proofs in proofs_by_id.items():
            tokens.append(
                TokenV4Token(
                    i=bytes.fromhex(keyset_id),
                    p=[
                        TokenV4Proof(
                            a=p.amount,
                            s=p.secret,
                            c=bytes.fromhex(p.C),
                            d=(
                                TokenV4DLEQ(
                                    e=bytes.fromhex(p.dleq.e),
                                    s=bytes.fromhex(p.dleq.s),
                                    r=bytes.fromhex(p.dleq.r),
                                )
                                if p.dleq
                                else None
                            ),
                            w=p.witness,
                        )
                        for p in keyset_proofs
                    ],
                )
            )

        return cls(
            t=tokens,
            d=tokenv3.memo,
            m=tokenv3.get_mints()[0],
            u=tokenv3.unit or "sat",
        )

    def serialize_to_dict(self, include_dleq=False):
        """Build the dict that gets CBOR-encoded.

        Strips DLEQ proofs unless requested and drops absent witnesses to
        keep the encoding minimal.
        """
        return_dict: Dict[str, Any] = dict(t=[t.dict() for t in self.t])
        # strip dleq if needed
        if not include_dleq:
            for token in return_dict["t"]:
                for proof in token["p"]:
                    if "d" in proof:
                        del proof["d"]
        # strip witness if not present
        for token in return_dict["t"]:
            for proof in token["p"]:
                if not proof.get("w"):
                    del proof["w"]
        # optional memo
        if self.d:
            return_dict.update(dict(d=self.d))
        # mint
        return_dict.update(dict(m=self.m))
        # unit
        return_dict.update(dict(u=self.u))
        return return_dict

    def serialize(self, include_dleq=False) -> str:
        """
        Takes a TokenV4 and serializes it as "cashuB<cbor_urlsafe_base64>".
        """
        prefix = "cashuB"
        tokenv4_serialized = prefix
        # encode the token as a base64 string
        tokenv4_serialized += base64.urlsafe_b64encode(
            cbor2.dumps(self.serialize_to_dict(include_dleq))
        ).decode()
        return tokenv4_serialized

    @classmethod
    def deserialize(cls, tokenv4_serialized: str) -> "TokenV4":
        """
        Ingests a serialized "cashuB<cbor_urlsafe_base64>" token and returns a TokenV4.
        """
        prefix = "cashuB"
        assert tokenv4_serialized.startswith(prefix), Exception(
            f"Token prefix not valid. Expected {prefix}."
        )
        token_base64 = tokenv4_serialized[len(prefix) :]
        # pad to a multiple of 4 characters; (-len) % 4 avoids appending a
        # useless "====" when the string is already correctly padded
        token_base64 += "=" * ((-len(token_base64)) % 4)
        token = cbor2.loads(base64.urlsafe_b64decode(token_base64))
        return cls.parse_obj(token)

    def to_tokenv3(self) -> TokenV3:
        """Convert back into the (legacy) TokenV3 representation."""
        tokenv3 = TokenV3()
        for token in self.t:
            tokenv3.token.append(
                TokenV3Token(
                    mint=self.m,
                    proofs=[
                        Proof(
                            id=token.i.hex(),
                            amount=p.a,
                            secret=p.s,
                            C=p.c.hex(),
                            dleq=(
                                DLEQWallet(
                                    e=p.d.e.hex(),
                                    s=p.d.s.hex(),
                                    r=p.d.r.hex(),
                                )
                                if p.d
                                else None
                            ),
                            witness=p.w,
                        )
                        for p in token.p
                    ],
                )
            )
        return tokenv3

View File

@@ -1,13 +1,9 @@
from ...core.base import TokenV3 from ...core.base import TokenV4
from ...wallet.crud import get_keysets from ...wallet.crud import get_keysets
async def verify_mints(wallet, tokenObj: TokenV3): async def verify_mints(wallet, tokenObj: TokenV4):
# verify mints # verify mints
mints = set([t.mint for t in tokenObj.token]) mint = tokenObj.mint
if None in mints: mint_keysets = await get_keysets(mint_url=mint, db=wallet.db)
raise Exception("Token has missing mint information.") assert len(mint_keysets), "We don't know this mint."
for mint in mints:
assert mint
mint_keysets = await get_keysets(mint_url=mint, db=wallet.db)
assert len(mint_keysets), "We don't know this mint."

View File

@@ -8,7 +8,7 @@ from typing import Optional
from fastapi import APIRouter, Query from fastapi import APIRouter, Query
from ...core.base import TokenV3 from ...core.base import TokenV3, TokenV4
from ...core.helpers import sum_proofs from ...core.helpers import sum_proofs
from ...core.settings import settings from ...core.settings import settings
from ...lightning.base import ( from ...lightning.base import (
@@ -261,7 +261,7 @@ async def receive_command(
wallet = await mint_wallet() wallet = await mint_wallet()
initial_balance = wallet.available_balance initial_balance = wallet.available_balance
if token: if token:
tokenObj: TokenV3 = deserialize_token_from_string(token) tokenObj: TokenV4 = deserialize_token_from_string(token)
await verify_mints(wallet, tokenObj) await verify_mints(wallet, tokenObj)
await receive(wallet, tokenObj) await receive(wallet, tokenObj)
elif nostr: elif nostr:
@@ -352,7 +352,7 @@ async def pending(
grouped_proofs = list(value) grouped_proofs = list(value)
token = await wallet.serialize_proofs(grouped_proofs) token = await wallet.serialize_proofs(grouped_proofs)
tokenObj = deserialize_token_from_string(token) tokenObj = deserialize_token_from_string(token)
mint = [t.mint for t in tokenObj.token if t.mint][0] mint = tokenObj.mint
reserved_date = datetime.utcfromtimestamp( reserved_date = datetime.utcfromtimestamp(
int(grouped_proofs[0].time_reserved) # type: ignore int(grouped_proofs[0].time_reserved) # type: ignore
).strftime("%Y-%m-%d %H:%M:%S") ).strftime("%Y-%m-%d %H:%M:%S")

View File

@@ -15,7 +15,7 @@ import click
from click import Context from click import Context
from loguru import logger from loguru import logger
from ...core.base import Invoice, Method, MintQuoteState, TokenV3, Unit from ...core.base import Invoice, Method, MintQuoteState, TokenV3, TokenV4, Unit
from ...core.helpers import sum_proofs from ...core.helpers import sum_proofs
from ...core.json_rpc.base import JSONRPCNotficationParams from ...core.json_rpc.base import JSONRPCNotficationParams
from ...core.logging import configure_logger from ...core.logging import configure_logger
@@ -479,11 +479,17 @@ async def balance(ctx: Context, verbose):
@cli.command("send", help="Send tokens.") @cli.command("send", help="Send tokens.")
@click.argument("amount", type=float) @click.argument("amount", type=float)
@click.argument("nostr", type=str, required=False) @click.option(
"--memo",
"-m",
default=None,
help="Memo for the token.",
type=str,
)
@click.option( @click.option(
"--nostr", "--nostr",
"-n", "-n",
"nopt", default=None,
help="Send to nostr pubkey.", help="Send to nostr pubkey.",
type=str, type=str,
) )
@@ -498,9 +504,10 @@ async def balance(ctx: Context, verbose):
) )
@click.option( @click.option(
"--legacy", "--legacy",
"-l",
default=False, default=False,
is_flag=True, is_flag=True,
help="Print legacy token without mint information.", help="Print legacy TokenV3 format.",
type=bool, type=bool,
) )
@click.option( @click.option(
@@ -535,8 +542,8 @@ async def balance(ctx: Context, verbose):
async def send_command( async def send_command(
ctx, ctx,
amount: int, amount: int,
memo: str,
nostr: str, nostr: str,
nopt: str,
lock: str, lock: str,
dleq: bool, dleq: bool,
legacy: bool, legacy: bool,
@@ -547,7 +554,7 @@ async def send_command(
): ):
wallet: Wallet = ctx.obj["WALLET"] wallet: Wallet = ctx.obj["WALLET"]
amount = int(amount * 100) if wallet.unit in [Unit.usd, Unit.eur] else int(amount) amount = int(amount * 100) if wallet.unit in [Unit.usd, Unit.eur] else int(amount)
if not nostr and not nopt: if not nostr:
await send( await send(
wallet, wallet,
amount=amount, amount=amount,
@@ -556,11 +563,10 @@ async def send_command(
offline=offline, offline=offline,
include_dleq=dleq, include_dleq=dleq,
include_fees=include_fees, include_fees=include_fees,
memo=memo,
) )
else: else:
await send_nostr( await send_nostr(wallet, amount=amount, pubkey=nostr, verbose=verbose, yes=yes)
wallet, amount=amount, pubkey=nostr or nopt, verbose=verbose, yes=yes
)
await print_balance(ctx) await print_balance(ctx)
@@ -587,19 +593,18 @@ async def receive_cli(
wallet: Wallet = ctx.obj["WALLET"] wallet: Wallet = ctx.obj["WALLET"]
if token: if token:
tokenObj = deserialize_token_from_string(token) token_obj = deserialize_token_from_string(token)
# verify that we trust all mints in these tokens # verify that we trust the mint in this tokens
# ask the user if they want to trust the new mints # ask the user if they want to trust the new mint
for mint_url in set([t.mint for t in tokenObj.token if t.mint]): mint_url = token_obj.mint
mint_wallet = Wallet( mint_wallet = Wallet(
mint_url, mint_url,
os.path.join(settings.cashu_dir, wallet.name), os.path.join(settings.cashu_dir, wallet.name),
unit=tokenObj.unit or wallet.unit.name, unit=token_obj.unit,
) )
await verify_mint(mint_wallet, mint_url) await verify_mint(mint_wallet, mint_url)
receive_wallet = await receive(wallet, tokenObj) receive_wallet = await receive(wallet, token_obj)
ctx.obj["WALLET"] = receive_wallet ctx.obj["WALLET"] = receive_wallet
elif nostr: elif nostr:
await receive_nostr(wallet) await receive_nostr(wallet)
# exit on keypress # exit on keypress
@@ -612,15 +617,17 @@ async def receive_cli(
for key, value in groupby(reserved_proofs, key=itemgetter("send_id")): # type: ignore for key, value in groupby(reserved_proofs, key=itemgetter("send_id")): # type: ignore
proofs = list(value) proofs = list(value)
token = await wallet.serialize_proofs(proofs) token = await wallet.serialize_proofs(proofs)
tokenObj = TokenV3.deserialize(token) token_obj = TokenV4.deserialize(token)
# verify that we trust all mints in these tokens # verify that we trust the mint of this token
# ask the user if they want to trust the new mints # ask the user if they want to trust the mint
for mint_url in set([t.mint for t in tokenObj.token if t.mint]): mint_url = token_obj.mint
mint_wallet = Wallet( mint_wallet = Wallet(
mint_url, os.path.join(settings.cashu_dir, wallet.name) mint_url,
) os.path.join(settings.cashu_dir, wallet.name),
await verify_mint(mint_wallet, mint_url) unit=token_obj.unit,
receive_wallet = await receive(wallet, tokenObj) )
await verify_mint(mint_wallet, mint_url)
receive_wallet = await receive(wallet, token_obj)
ctx.obj["WALLET"] = receive_wallet ctx.obj["WALLET"] = receive_wallet
else: else:
print("Error: enter token or use either flag --nostr or --all.") print("Error: enter token or use either flag --nostr or --all.")
@@ -665,8 +672,8 @@ async def burn(ctx: Context, token: str, all: bool, force: bool, delete: str):
proofs = [proof for proof in reserved_proofs if proof["send_id"] == delete] proofs = [proof for proof in reserved_proofs if proof["send_id"] == delete]
else: else:
# check only the specified ones # check only the specified ones
tokenObj = TokenV3.deserialize(token) token_obj = TokenV3.deserialize(token)
proofs = tokenObj.get_proofs() proofs = token_obj.get_proofs()
if delete: if delete:
await wallet.invalidate(proofs) await wallet.invalidate(proofs)
@@ -721,8 +728,8 @@ async def pending(ctx: Context, legacy, number: int, offset: int):
grouped_proofs = list(value) grouped_proofs = list(value)
# TODO: we can't return DLEQ because we don't store it # TODO: we can't return DLEQ because we don't store it
token = await wallet.serialize_proofs(grouped_proofs, include_dleq=False) token = await wallet.serialize_proofs(grouped_proofs, include_dleq=False)
tokenObj = deserialize_token_from_string(token) token_obj = deserialize_token_from_string(token)
mint = [t.mint for t in tokenObj.token][0] mint = token_obj.mint
# token_hidden_secret = await wallet.serialize_proofs(grouped_proofs) # token_hidden_secret = await wallet.serialize_proofs(grouped_proofs)
assert grouped_proofs[0].time_reserved assert grouped_proofs[0].time_reserved
reserved_date = datetime.fromtimestamp( reserved_date = datetime.fromtimestamp(
@@ -740,7 +747,7 @@ async def pending(ctx: Context, legacy, number: int, offset: int):
grouped_proofs, grouped_proofs,
legacy=True, legacy=True,
) )
print(f"{token_legacy}\n") print(f"Legacy token: {token_legacy}\n")
print("--------------------------\n") print("--------------------------\n")
print("To remove all spent tokens use: cashu burn -a") print("To remove all spent tokens use: cashu burn -a")
@@ -1077,5 +1084,5 @@ async def selfpay(ctx: Context, all: bool = False):
print(f"Selfpay token for mint {wallet.url}:") print(f"Selfpay token for mint {wallet.url}:")
print("") print("")
print(token) print(token)
tokenObj = TokenV3.deserialize(token) token_obj = TokenV4.deserialize(token)
await receive(wallet, tokenObj) await receive(wallet, token_obj)

View File

@@ -1,10 +1,9 @@
import base64
import json
import os import os
from typing import Optional
from loguru import logger from loguru import logger
from ..core.base import TokenV1, TokenV2, TokenV3, TokenV3Token from ..core.base import TokenV3, TokenV4
from ..core.db import Database from ..core.db import Database
from ..core.helpers import sum_proofs from ..core.helpers import sum_proofs
from ..core.migrations import migrate_databases from ..core.migrations import migrate_databases
@@ -55,7 +54,7 @@ async def redeem_TokenV3_multimint(wallet: Wallet, token: TokenV3) -> Wallet:
os.path.join(settings.cashu_dir, wallet.name), os.path.join(settings.cashu_dir, wallet.name),
unit=token.unit or wallet.unit.name, unit=token.unit or wallet.unit.name,
) )
keyset_ids = mint_wallet._get_proofs_keysets(t.proofs) keyset_ids = mint_wallet._get_proofs_keyset_ids(t.proofs)
logger.trace(f"Keysets in tokens: {' '.join(set(keyset_ids))}") logger.trace(f"Keysets in tokens: {' '.join(set(keyset_ids))}")
await mint_wallet.load_mint() await mint_wallet.load_mint()
proofs_to_keep, _ = await mint_wallet.redeem(t.proofs) proofs_to_keep, _ = await mint_wallet.redeem(t.proofs)
@@ -65,59 +64,24 @@ async def redeem_TokenV3_multimint(wallet: Wallet, token: TokenV3) -> Wallet:
return mint_wallet return mint_wallet
def serialize_TokenV2_to_TokenV3(tokenv2: TokenV2): async def redeem_TokenV4(wallet: Wallet, token: TokenV4) -> Wallet:
"""Helper function to receive legacy TokenV2 tokens.
Takes a list of proofs and constructs a *serialized* TokenV3 to be received through
the ordinary path.
Returns:
TokenV3: TokenV3
""" """
tokenv3 = TokenV3(token=[TokenV3Token(proofs=tokenv2.proofs)]) Redeem a token with a single mint.
if tokenv2.mints:
tokenv3.token[0].mint = tokenv2.mints[0].url
token_serialized = tokenv3.serialize()
return token_serialized
def serialize_TokenV1_to_TokenV3(tokenv1: TokenV1):
"""Helper function to receive legacy TokenV1 tokens.
Takes a list of proofs and constructs a *serialized* TokenV3 to be received through
the ordinary path.
Returns:
TokenV3: TokenV3
""" """
tokenv3 = TokenV3(token=[TokenV3Token(proofs=tokenv1.__root__)]) await wallet.load_mint()
token_serialized = tokenv3.serialize() proofs_to_keep, _ = await wallet.redeem(token.proofs)
return token_serialized print(f"Received {wallet.unit.str(sum_proofs(proofs_to_keep))}")
return wallet
def deserialize_token_from_string(token: str) -> TokenV3: def deserialize_token_from_string(token: str) -> TokenV4:
# deserialize token # deserialize token
# ----- backwards compatibility ----- if token.startswith("cashuA"):
tokenV3Obj = TokenV3.deserialize(token)
# V2Tokens (0.7-0.11.0) (eyJwcm9...) return TokenV4.from_tokenv3(tokenV3Obj)
if token.startswith("eyJwcm9"): if token.startswith("cashuB"):
try: tokenObj = TokenV4.deserialize(token)
tokenv2 = TokenV2.parse_obj(json.loads(base64.urlsafe_b64decode(token)))
token = serialize_TokenV2_to_TokenV3(tokenv2)
except Exception:
pass
# V1Tokens (<0.7) (W3siaWQ...)
if token.startswith("W3siaWQ"):
try:
tokenv1 = TokenV1.parse_obj(json.loads(base64.urlsafe_b64decode(token)))
token = serialize_TokenV1_to_TokenV3(tokenv1)
except Exception:
pass
if token.startswith("cashu"):
tokenObj = TokenV3.deserialize(token)
assert len(tokenObj.token), Exception("no proofs in token")
assert len(tokenObj.token[0].proofs), Exception("no proofs in token")
return tokenObj return tokenObj
raise Exception("Invalid token") raise Exception("Invalid token")
@@ -125,38 +89,13 @@ def deserialize_token_from_string(token: str) -> TokenV3:
async def receive( async def receive(
wallet: Wallet, wallet: Wallet,
tokenObj: TokenV3, tokenObj: TokenV4,
) -> Wallet: ) -> Wallet:
logger.debug(f"receive: {tokenObj}") # redeem tokens with new wallet instances
proofs = [p for t in tokenObj.token for p in t.proofs] mint_wallet = await redeem_TokenV4(
wallet,
includes_mint_info: bool = any([t.mint for t in tokenObj.token]) tokenObj,
)
if includes_mint_info:
# redeem tokens with new wallet instances
mint_wallet = await redeem_TokenV3_multimint(
wallet,
tokenObj,
)
else:
# this is very legacy code, virtually any token should have mint information
# no mint information present, we extract the proofs find the mint and unit from the db
keyset_in_token = proofs[0].id
assert keyset_in_token
# we get the keyset from the db
mint_keysets = await get_keysets(id=keyset_in_token, db=wallet.db)
assert mint_keysets, Exception(f"we don't know this keyset: {keyset_in_token}")
mint_keyset = [k for k in mint_keysets if k.id == keyset_in_token][0]
assert mint_keyset.mint_url, Exception("we don't know this mint's URL")
# now we have the URL
mint_wallet = await Wallet.with_db(
mint_keyset.mint_url,
os.path.join(settings.cashu_dir, wallet.name),
unit=mint_keyset.unit.name or wallet.unit.name,
)
await mint_wallet.load_mint(keyset_in_token)
_, _ = await mint_wallet.redeem(proofs)
print(f"Received {mint_wallet.unit.str(sum_proofs(proofs))}")
# reload main wallet so the balance updates # reload main wallet so the balance updates
await wallet.load_proofs(reload=True) await wallet.load_proofs(reload=True)
@@ -172,6 +111,7 @@ async def send(
offline: bool = False, offline: bool = False,
include_dleq: bool = False, include_dleq: bool = False,
include_fees: bool = False, include_fees: bool = False,
memo: Optional[str] = None,
): ):
""" """
Prints token to send to stdout. Prints token to send to stdout.
@@ -210,21 +150,9 @@ async def send(
) )
token = await wallet.serialize_proofs( token = await wallet.serialize_proofs(
send_proofs, send_proofs, include_dleq=include_dleq, legacy=legacy, memo=memo
include_mints=True,
include_dleq=include_dleq,
) )
print(token) print(token)
await wallet.set_reserved(send_proofs, reserved=True) await wallet.set_reserved(send_proofs, reserved=True)
if legacy:
print("")
print("Old token format:")
print("")
token = await wallet.serialize_proofs(
send_proofs,
legacy=True,
include_dleq=include_dleq,
)
print(token)
return wallet.available_balance, token return wallet.available_balance, token

View File

@@ -6,7 +6,7 @@ import click
from httpx import ConnectError from httpx import ConnectError
from loguru import logger from loguru import logger
from ..core.base import TokenV3 from ..core.base import TokenV4
from ..core.settings import settings from ..core.settings import settings
from ..nostr.client.client import NostrClient from ..nostr.client.client import NostrClient
from ..nostr.event import Event from ..nostr.event import Event
@@ -127,10 +127,10 @@ async def receive_nostr(
for w in words: for w in words:
try: try:
# call the receive method # call the receive method
tokenObj: TokenV3 = deserialize_token_from_string(w) tokenObj: TokenV4 = deserialize_token_from_string(w)
print( print(
f"Receiving {tokenObj.get_amount()} sat on mint" f"Receiving {tokenObj.amount} sat on mint"
f" {tokenObj.get_mints()[0]} from nostr user {event.public_key} at" f" {tokenObj.mint} from nostr user {event.public_key} at"
f" {date_str}" f" {date_str}"
) )
asyncio.run( asyncio.run(

View File

@@ -1,5 +1,3 @@
import base64
import json
from itertools import groupby from itertools import groupby
from typing import Dict, List, Optional from typing import Dict, List, Optional
@@ -7,10 +5,11 @@ from loguru import logger
from ..core.base import ( from ..core.base import (
Proof, Proof,
TokenV2,
TokenV2Mint,
TokenV3, TokenV3,
TokenV3Token, TokenV3Token,
TokenV4,
TokenV4Proof,
TokenV4Token,
Unit, Unit,
WalletKeyset, WalletKeyset,
) )
@@ -64,7 +63,7 @@ class WalletProofs(SupportsDb, SupportsKeysets):
ret[unit].append(proof) ret[unit].append(proof)
return ret return ret
def _get_proofs_keysets(self, proofs: List[Proof]) -> List[str]: def _get_proofs_keyset_ids(self, proofs: List[Proof]) -> List[str]:
"""Extracts all keyset ids from a list of proofs. """Extracts all keyset ids from a list of proofs.
Args: Args:
@@ -92,8 +91,31 @@ class WalletProofs(SupportsDb, SupportsKeysets):
) )
return mint_urls return mint_urls
async def _make_token( async def serialize_proofs(
self, proofs: List[Proof], include_mints=True, include_unit=True self,
proofs: List[Proof],
include_dleq=False,
legacy=False,
memo: Optional[str] = None,
) -> str:
"""Produces sharable token with proofs and mint information.
Args:
proofs (List[Proof]): List of proofs to be included in the token
legacy (bool, optional): Whether to produce a legacy V3 token. Defaults to False.
Returns:
str: Serialized Cashu token
"""
if legacy:
tokenv3 = await self._make_tokenv3(proofs, memo)
return tokenv3.serialize(include_dleq)
else:
tokenv4 = await self._make_token(proofs, include_dleq, memo)
return tokenv4.serialize(include_dleq)
async def _make_tokenv3(
self, proofs: List[Proof], memo: Optional[str] = None
) -> TokenV3: ) -> TokenV3:
""" """
Takes list of proofs and produces a TokenV3 by looking up Takes list of proofs and produces a TokenV3 by looking up
@@ -101,108 +123,81 @@ class WalletProofs(SupportsDb, SupportsKeysets):
Args: Args:
proofs (List[Proof]): List of proofs to be included in the token proofs (List[Proof]): List of proofs to be included in the token
include_mints (bool, optional): Whether to include the mint URLs in the token. Defaults to True. memo (Optional[str], optional): Memo to be included in the token. Defaults to None.
Returns: Returns:
TokenV3: TokenV3 object TokenV3: TokenV3 object
""" """
token = TokenV3() token = TokenV3()
if include_unit:
token.unit = self.unit.name
if include_mints: # we create a map from mint url to keyset id and then group
# we create a map from mint url to keyset id and then group # all proofs with their mint url to build a tokenv3
# all proofs with their mint url to build a tokenv3
# extract all keysets from proofs # extract all keysets from proofs
keysets = self._get_proofs_keysets(proofs) keysets = self._get_proofs_keyset_ids(proofs)
# get all mint URLs for all unique keysets from db # get all mint URLs for all unique keysets from db
mint_urls = await self._get_keyset_urls(keysets) mint_urls = await self._get_keyset_urls(keysets)
# append all url-grouped proofs to token # append all url-grouped proofs to token
for url, ids in mint_urls.items(): for url, ids in mint_urls.items():
mint_proofs = [p for p in proofs if p.id in ids] mint_proofs = [p for p in proofs if p.id in ids]
token.token.append(TokenV3Token(mint=url, proofs=mint_proofs)) token.token.append(TokenV3Token(mint=url, proofs=mint_proofs))
else:
token_proofs = TokenV3Token(proofs=proofs) if memo:
token.token.append(token_proofs) token.memo = memo
return token return token
async def serialize_proofs( async def _make_tokenv4(
self, proofs: List[Proof], include_mints=True, include_dleq=False, legacy=False self, proofs: List[Proof], include_dleq=False, memo: Optional[str] = None
) -> str: ) -> TokenV4:
"""Produces sharable token with proofs and mint information. """
Takes a list of proofs and returns a TokenV4
Args: Args:
proofs (List[Proof]): List of proofs to be included in the token proofs (List[Proof]): List of proofs to be serialized
include_mints (bool, optional): Whether to include the mint URLs in the token. Defaults to True.
legacy (bool, optional): Whether to produce a legacy V2 token. Defaults to False.
Returns: Returns:
str: Serialized Cashu token TokenV4: TokenV4 object
""" """
if legacy: # get all keysets from proofs
# V2 tokens keyset_ids = set(self._get_proofs_keyset_ids(proofs))
token_v2 = await self._make_token_v2(proofs, include_mints) keysets = [self.keysets[i] for i in keyset_ids]
return await self._serialize_token_base64_tokenv2(token_v2) # we make sure that all proofs are from keysets of the same mint
if len(set([k.mint_url for k in keysets])) > 1:
raise ValueError("TokenV4 can only contain proofs from a single mint URL")
mint_url = keysets[0].mint_url
if not mint_url:
raise ValueError("No mint URL found for keyset")
# # deprecated code for V1 tokens # we make sure that all keysets have the same unit
# proofs_serialized = [p.to_dict() for p in proofs] if len(set([k.unit for k in keysets])) > 1:
# return base64.urlsafe_b64encode( raise ValueError(
# json.dumps(proofs_serialized).encode() "TokenV4 can only contain proofs from keysets with the same unit"
# ).decode() )
unit_str = keysets[0].unit.name
# V3 tokens tokens: List[TokenV4Token] = []
token = await self._make_token(proofs, include_mints) for keyset_id in keyset_ids:
return token.serialize(include_dleq) proofs_keyset = [p for p in proofs if p.id == keyset_id]
tokenv4_proofs = []
for proof in proofs_keyset:
tokenv4_proofs.append(TokenV4Proof.from_proof(proof, include_dleq))
tokenv4_token = TokenV4Token(i=bytes.fromhex(keyset_id), p=tokenv4_proofs)
tokens.append(tokenv4_token)
async def _make_token_v2(self, proofs: List[Proof], include_mints=True) -> TokenV2: return TokenV4(m=mint_url, u=unit_str, t=tokens, d=memo)
async def _make_token(
self, proofs: List[Proof], include_dleq=False, memo: Optional[str] = None
) -> TokenV4:
""" """
Takes list of proofs and produces a TokenV2 by looking up Takes a list of proofs and returns a TokenV4
the keyset id and mint URLs from the database.
"""
# build token
token = TokenV2(proofs=proofs)
# add mint information to the token, if requested
if include_mints:
# dummy object to hold information about the mint
mints: Dict[str, TokenV2Mint] = {}
# dummy object to hold all keyset id's we need to fetch from the db later
keysets: List[str] = [proof.id for proof in proofs if proof.id]
# iterate through unique keyset ids
for id in set(keysets):
# load the keyset from the db
keysets_db = await get_keysets(id=id, db=self.db)
keyset_db = keysets_db[0] if keysets_db else None
if keyset_db and keyset_db.mint_url and keyset_db.id:
# we group all mints according to URL
if keyset_db.mint_url not in mints:
mints[keyset_db.mint_url] = TokenV2Mint(
url=keyset_db.mint_url,
ids=[keyset_db.id],
)
else:
# if a mint URL has multiple keysets, append to the already existing list
mints[keyset_db.mint_url].ids.append(keyset_db.id)
if len(mints) > 0:
# add mints grouped by url to the token
token.mints = list(mints.values())
return token
async def _serialize_token_base64_tokenv2(self, token: TokenV2) -> str:
"""
Takes a TokenV2 and serializes it in urlsafe_base64.
Args: Args:
token (TokenV2): TokenV2 object to be serialized proofs (List[Proof]): List of proofs to be serialized
Returns: Returns:
str: Serialized token TokenV4: TokenV4 object
""" """
# encode the token as a base64 string
token_base64 = base64.urlsafe_b64encode( return await self._make_tokenv4(proofs, include_dleq, memo)
json.dumps(token.to_dict()).encode()
).decode()
return token_base64

View File

@@ -588,6 +588,11 @@ class Wallet(
) )
amounts = keep_outputs + send_outputs amounts = keep_outputs + send_outputs
if not amounts:
logger.warning("Swap has no outputs")
return [], []
# generate secrets for new outputs # generate secrets for new outputs
if secret_lock is None: if secret_lock is None:
secrets, rs, derivation_paths = await self.generate_n_secrets(len(amounts)) secrets, rs, derivation_paths = await self.generate_n_secrets(len(amounts))

51
poetry.lock generated
View File

@@ -122,6 +122,57 @@ click = "*"
ecdsa = "*" ecdsa = "*"
secp256k1 = "*" secp256k1 = "*"
[[package]]
name = "cbor2"
version = "5.6.2"
description = "CBOR (de)serializer with extensive tag support"
optional = false
python-versions = ">=3.8"
files = [
{file = "cbor2-5.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:516b8390936bb172ff18d7b609a452eaa51991513628949b0a9bf25cbe5a7129"},
{file = "cbor2-5.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1b8b504b590367a51fe8c0d9b8cb458a614d782d37b24483097e2b1e93ed0fff"},
{file = "cbor2-5.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f687e6731b1198811223576800258a712ddbfdcfa86c0aee2cc8269193e6b96"},
{file = "cbor2-5.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e94043d99fe779f62a15a5e156768588a2a7047bb3a127fa312ac1135ff5ecb"},
{file = "cbor2-5.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8af7162fcf7aa2649f02563bdb18b2fa6478b751eee4df0257bffe19ea8f107a"},
{file = "cbor2-5.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ea7ecd81c5c6e02c2635973f52a0dd1e19c0bf5ef51f813d8cd5e3e7ed072726"},
{file = "cbor2-5.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3c7f223f1fedc74d33f363d184cb2bab9e4bdf24998f73b5e3bef366d6c41628"},
{file = "cbor2-5.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ea9e150029c3976c46ee9870b6dcdb0a5baae21008fe3290564886b11aa2b64"},
{file = "cbor2-5.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e06710e5cf6f56b82b0b90d2f356aa229b99e570994534206985f675fd307"},
{file = "cbor2-5.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b01a718e083e6de8b43296c3ccdb3aa8af6641f6bbb3ea1700427c6af73db28a"},
{file = "cbor2-5.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac85eb731c524d148f608b9bdb2069fa79e374a10ed5d10a2405eba9a6561e60"},
{file = "cbor2-5.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03e5b68867b9d89ff2abd14ef7c6d42fbd991adc3e734a19a294935f22a4d05a"},
{file = "cbor2-5.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7221b83000ee01d674572eec1d1caa366eac109d1d32c14d7af9a4aaaf496563"},
{file = "cbor2-5.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:9aca73b63bdc6561e1a0d38618e78b9c204c942260d51e663c92c4ba6c961684"},
{file = "cbor2-5.6.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:377cfe9d5560c682486faef6d856226abf8b2801d95fa29d4e5d75b1615eb091"},
{file = "cbor2-5.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fdc564ef2e9228bcd96ec8c6cdaa431a48ab03b3fb8326ead4b3f986330e5b9e"},
{file = "cbor2-5.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d1c0021d9a1f673066de7c8941f71a59abb11909cc355892dda01e79a2b3045"},
{file = "cbor2-5.6.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fde9e704e96751e0729cc58b912d0e77c34387fb6bcceea0817069e8683df45"},
{file = "cbor2-5.6.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:30e9ba8f4896726ca61869efacda50b6859aff92162ae5a0e192859664f36c81"},
{file = "cbor2-5.6.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a1e18e65ac71e04434ff5b58bde5c53f85b9c5bc92a3c0e2265089d3034f3"},
{file = "cbor2-5.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:94981277b4bf448a2754c1f34a9d0055a9d1c5a8d102c933ffe95c80f1085bae"},
{file = "cbor2-5.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f70db0ebcf005c25408e8d5cc4b9558c899f13a3e2f8281fa3d3be4894e0e821"},
{file = "cbor2-5.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:22c24fe9ef1696a84b8fd80ff66eb0e5234505d8b9a9711fc6db57bce10771f3"},
{file = "cbor2-5.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4a3420f80d6b942874d66eaad07658066370df994ddee4125b48b2cbc61ece"},
{file = "cbor2-5.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b28d8ff0e726224a7429281700c28afe0e665f83f9ae79648cbae3f1a391cbf"},
{file = "cbor2-5.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c10ede9462458998f1b9c488e25fe3763aa2491119b7af472b72bf538d789e24"},
{file = "cbor2-5.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ea686dfb5e54d690e704ce04993bc8ca0052a7cd2d4b13dd333a41cca8a05a05"},
{file = "cbor2-5.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:22996159b491d545ecfd489392d3c71e5d0afb9a202dfc0edc8b2cf413a58326"},
{file = "cbor2-5.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9faa0712d414a88cc1244c78cd4b28fced44f1827dbd8c1649e3c40588aa670f"},
{file = "cbor2-5.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6031a284d93fc953fc2a2918f261c4f5100905bd064ca3b46961643e7312a828"},
{file = "cbor2-5.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30c8a9a9df79f26e72d8d5fa51ef08eb250d9869a711bcf9539f1865916c983"},
{file = "cbor2-5.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44bf7457fca23209e14dab8181dff82466a83b72e55b444dbbfe90fa67659492"},
{file = "cbor2-5.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc29c068687aa2e7778f63b653f1346065b858427a2555df4dc2191f4a0de8ce"},
{file = "cbor2-5.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42eaf0f768bd27afcb38135d5bfc361d3a157f1f5c7dddcd8d391f7fa43d9de8"},
{file = "cbor2-5.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:8839b73befa010358477736680657b9d08c1ed935fd973decb1909712a41afdc"},
{file = "cbor2-5.6.2-py3-none-any.whl", hash = "sha256:c0b53a65673550fde483724ff683753f49462d392d45d7b6576364b39e76e54c"},
{file = "cbor2-5.6.2.tar.gz", hash = "sha256:b7513c2dea8868991fad7ef8899890ebcf8b199b9b4461c3c11d7ad3aef4820d"},
]
[package.extras]
benchmarks = ["pytest-benchmark (==4.0.0)"]
doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)", "typing-extensions"]
test = ["coverage (>=7)", "hypothesis", "pytest"]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2024.2.2" version = "2024.2.2"

View File

@@ -34,6 +34,7 @@ bolt11 = "^2.0.5"
pre-commit = "^3.5.0" pre-commit = "^3.5.0"
websockets = "^12.0" websockets = "^12.0"
slowapi = "^0.1.9" slowapi = "^0.1.9"
cbor2 = "^5.6.2"
[tool.poetry.extras] [tool.poetry.extras]
pgsql = ["psycopg2-binary"] pgsql = ["psycopg2-binary"]

View File

@@ -1,6 +1,6 @@
import pytest import pytest
from cashu.core.base import TokenV3 from cashu.core.base import TokenV3, TokenV4, Unit
from cashu.core.helpers import calculate_number_of_blank_outputs from cashu.core.helpers import calculate_number_of_blank_outputs
from cashu.core.split import amount_split from cashu.core.split import amount_split
@@ -21,6 +21,20 @@ def test_tokenv3_deserialize_get_attributes():
assert len(token.get_proofs()) == 2 assert len(token.get_proofs()) == 2
def test_tokenv3_deserialize_serialize():
    """Round-trip check: a serialized V3 token survives deserialize -> serialize."""
    segments = [
        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIkplaFpMVTZuQ3BSZCIsICJh",
        "bW91bnQiOiAyLCAic2VjcmV0IjogIjBFN2lDazRkVmxSZjVQRjFnNFpWMnci",
        "LCAiQyI6ICIwM2FiNTgwYWQ5NTc3OGVkNTI5NmY4YmVlNjU1ZGJkN2Q2NDJmNWQzMmRlOG",
        "UyNDg0NzdlMGI0ZDZhYTg2M2ZjZDUifSwgeyJpZCI6ICJKZWhaTFU2bkNwUmQiLCAiYW",
        "1vdW50IjogOCwgInNlY3JldCI6ICJzNklwZXh3SGNxcXVLZDZYbW9qTDJnIiwgIkMiOiAiM",
        "DIyZDAwNGY5ZWMxNmE1OGFkOTAxNGMyNTliNmQ2MTRlZDM2ODgyOWYwMmMzODc3M2M0",
        "NzIyMWY0OTYxY2UzZjIzIn1dLCAibWludCI6ICJodHRwOi8vbG9jYWxob3N0OjMzMzgifV19",
    ]
    token_str = "".join(segments)
    # deserializing and re-serializing must reproduce the input byte-for-byte
    assert TokenV3.deserialize(token_str).serialize() == token_str
def test_tokenv3_deserialize_serialize_with_dleq(): def test_tokenv3_deserialize_serialize_with_dleq():
token_str = ( token_str = (
"cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93M" "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93M"
@@ -45,20 +59,6 @@ def test_tokenv3_deserialize_serialize_with_dleq():
assert token.serialize(include_dleq=True) == token_str assert token.serialize(include_dleq=True) == token_str
def test_tokenv3_deserialize_serialize():
    """Ensure a V3 token string round-trips through deserialize() and serialize()."""
    serialized = (
        "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIkplaFpMVTZuQ3BSZCIsICJh"
        "bW91bnQiOiAyLCAic2VjcmV0IjogIjBFN2lDazRkVmxSZjVQRjFnNFpWMnci"
        "LCAiQyI6ICIwM2FiNTgwYWQ5NTc3OGVkNTI5NmY4YmVlNjU1ZGJkN2Q2NDJmNWQzMmRlOG"
        "UyNDg0NzdlMGI0ZDZhYTg2M2ZjZDUifSwgeyJpZCI6ICJKZWhaTFU2bkNwUmQiLCAiYW"
        "1vdW50IjogOCwgInNlY3JldCI6ICJzNklwZXh3SGNxcXVLZDZYbW9qTDJnIiwgIkMiOiAiM"
        "DIyZDAwNGY5ZWMxNmE1OGFkOTAxNGMyNTliNmQ2MTRlZDM2ODgyOWYwMmMzODc3M2M0"
        "NzIyMWY0OTYxY2UzZjIzIn1dLCAibWludCI6ICJodHRwOi8vbG9jYWxob3N0OjMzMzgifV19"
    )
    parsed = TokenV3.deserialize(serialized)
    round_tripped = parsed.serialize()
    assert round_tripped == serialized
def test_tokenv3_deserialize_serialize_no_dleq(): def test_tokenv3_deserialize_serialize_no_dleq():
token_str = ( token_str = (
"cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93MSIsICJhb" "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93MSIsICJhb"
@@ -107,7 +107,7 @@ def test_tokenv3_deserialize_with_memo():
assert token.memo == "Test memo" assert token.memo == "Test memo"
def test_serialize_example_token_nut00(): def test_tokenv3_serialize_example_token_nut00():
token_dict = { token_dict = {
"token": [ "token": [
{ {
@@ -144,6 +144,107 @@ def test_serialize_example_token_nut00():
) )
def test_tokenv4_deserialize_get_attributes():
    """Deserialize a V4 (CBOR) token and verify its derived attributes."""
    token_str = "cashuBo2F0gaJhaUgArSaMTR9YJmFwgqNhYQJhc3hAMDZlM2UzZjY4NDRiOGZkOGQ3NDMwODY1MjY3MjQ5YWU3NjdhMzg5MDBjODdkNGE0ZDMxOGY4MTJmNzkzN2ZiMmFjWCEDXDG_wzG35Lu4vcAtiycLSQlNqH65afih9N2SrFJn3GCjYWEIYXN4QDBmNTE5YjgwOWZlNmQ5MzZkMjVhYmU1YjhjYTZhMDRlNDc3OTJjOTI0YTkwZWRmYjU1MmM1ZjkzODJkNzFjMDJhY1ghA4CNH8dD8NNt715E37Ar65X6p6uBUoDbe8JipQp81TIgYW11aHR0cDovL2xvY2FsaG9zdDozMzM4YXVjc2F0"
    parsed = TokenV4.deserialize(token_str)
    # mint / unit / memo metadata
    assert parsed.mint == "http://localhost:3338"
    assert parsed.unit == Unit.sat.name
    assert parsed.memo is None
    # proofs, their individual amounts, and the total
    assert len(parsed.proofs) == 2
    assert parsed.amounts == [2, 8]
    assert parsed.amount == 10
def test_tokenv4_deserialize_serialize():
    """A serialized V4 token must survive a deserialize/serialize round trip."""
    serialized = "cashuBo2F0gaJhaUgArSaMTR9YJmFwgqNhYQJhc3hAMDZlM2UzZjY4NDRiOGZkOGQ3NDMwODY1MjY3MjQ5YWU3NjdhMzg5MDBjODdkNGE0ZDMxOGY4MTJmNzkzN2ZiMmFjWCEDXDG_wzG35Lu4vcAtiycLSQlNqH65afih9N2SrFJn3GCjYWEIYXN4QDBmNTE5YjgwOWZlNmQ5MzZkMjVhYmU1YjhjYTZhMDRlNDc3OTJjOTI0YTkwZWRmYjU1MmM1ZjkzODJkNzFjMDJhY1ghA4CNH8dD8NNt715E37Ar65X6p6uBUoDbe8JipQp81TIgYW11aHR0cDovL2xvY2FsaG9zdDozMzM4YXVjc2F0"
    # deserialize, then serialize again; output must equal the input exactly
    assert TokenV4.deserialize(serialized).serialize() == serialized
def test_tokenv4_deserialize_with_dleq():
    """DLEQ proofs embedded in a V4 token are preserved through a round trip."""
    token_str = "cashuBo2F0gaJhaUgArSaMTR9YJmFwgqRhYQhhc3hAY2I4ZWViZWE3OGRjMTZmMWU4MmY5YTZlOWI4YTU3YTM5ZDM2M2M5MzZkMzBmZTI5YmVlZDI2M2MwOGFkOTY2M2FjWCECRmlA6zYOcRSgigEUDv0BBtC2Ag8x8ZOaZUKo8J2_VWdhZKNhZVggscHmr2oHB_x9Bzhgeg2p9Vbq5Ai23olDz2JbmCRx6dlhc1ggrPmtYrRAgEHnYLIQ83cgyFjAjWNqMeNhUadHMxEm0edhclggQ5c_5bES_NhtzunlDls70fhMDWDgo9DY0kk1GuJGM2ikYWECYXN4QDQxN2E2MjZmNWMyNmVhNjliODM0YTZkZTcxYmZiMGY3ZTQ0NDhlZGFkY2FlNGRmNWVhMzM3NDdmOTVhYjRhYjRhY1ghAwyZ1QstFpNe0sppbduQxiePmGVUUk0mWDj5JAFs74-LYWSjYWVYIPyAzLub_bwc60qFkNfETjig-ESZSR8xdpANy1rHwvHKYXNYIOCInwuipARTL8IFT6NoSJqeeSMjlcbPzL-YSmXjDLIuYXJYIOLk-C0Fhba02B0Ut1BjMQqzxVGaO1NJM9Wi_aDQ37jqYW11aHR0cDovL2xvY2FsaG9zdDozMzM4YXVjc2F0"
    parsed = TokenV4.deserialize(token_str)
    dleq = parsed.proofs[0].dleq
    # the DLEQ proof and each of its components must be present
    assert dleq is not None
    assert dleq.e
    assert dleq.s
    assert dleq.r
    # serializing with DLEQ included reproduces the original string
    assert parsed.serialize(include_dleq=True) == token_str
def test_tokenv4_serialize_example_single_keyset_nut00():
    """Serialize the single-keyset NUT-00 example token and pin the exact output."""
    proof = {
        "a": 1,
        "s": "9a6dbb847bd232ba76db0df197216b29d3b8cc14553cd27827fc1cc942fedb4e",
        "c": bytes.fromhex(
            "038618543ffb6b8695df4ad4babcde92a34a96bdcd97dcee0d7ccf98d472126792"
        ),
    }
    token_dict = {
        "t": [{"i": bytes.fromhex("00ad268c4d1f5826"), "p": [proof]}],
        "d": "Thank you",
        "m": "http://localhost:3338",
        "u": "sat",
    }
    expected = (
        "cashuBpGF0gaJhaUgArSaMTR9YJmFwgaNhYQFhc3hAOWE2ZGJiODQ3YmQyMzJiYTc2ZGIwZGYxOTcyMTZiMjlkM2I4Y2MxNDU1M2NkMjc4MjdmYzFjYzk0MmZlZGI0ZWFjWCEDhhhUP_trhpXfStS6vN6So0qWvc2X3O4NfM-Y1HISZ5JhZGlUaGFuayB5b3VhbXVodHRwOi8vbG9jYWxob3N0OjMzMzhhdWNzYXQ="
    )
    assert TokenV4.parse_obj(token_dict).serialize() == expected
def test_tokenv4_serialize_example_token_nut00():
    """Serialize the multi-keyset NUT-00 example token and pin the exact output."""
    first_keyset = {
        "i": bytes.fromhex("00ffd48b8f5ecf80"),
        "p": [
            {
                "a": 1,
                "s": "acc12435e7b8484c3cf1850149218af90f716a52bf4a5ed347e48ecc13f77388",
                "c": bytes.fromhex(
                    "0244538319de485d55bed3b29a642bee5879375ab9e7a620e11e48ba482421f3cf"
                ),
            },
        ],
    }
    second_keyset = {
        "i": bytes.fromhex("00ad268c4d1f5826"),
        "p": [
            {
                "a": 2,
                "s": "1323d3d4707a58ad2e23ada4e9f1f49f5a5b4ac7b708eb0d61f738f48307e8ee",
                "c": bytes.fromhex(
                    "023456aa110d84b4ac747aebd82c3b005aca50bf457ebd5737a4414fac3ae7d94d"
                ),
            },
            {
                "a": 1,
                "s": "56bcbcbb7cc6406b3fa5d57d2174f4eff8b4402b176926d3a57d3c3dcbb59d57",
                "c": bytes.fromhex(
                    "0273129c5719e599379a974a626363c333c56cafc0e6d01abe46d5808280789c63"
                ),
            },
        ],
    }
    token_dict = {
        "t": [first_keyset, second_keyset],
        "m": "http://localhost:3338",
        "u": "sat",
    }
    expected = "cashuBo2F0gqJhaUgA_9SLj17PgGFwgaNhYQFhc3hAYWNjMTI0MzVlN2I4NDg0YzNjZjE4NTAxNDkyMThhZjkwZjcxNmE1MmJmNGE1ZWQzNDdlNDhlY2MxM2Y3NzM4OGFjWCECRFODGd5IXVW-07KaZCvuWHk3WrnnpiDhHki6SCQh88-iYWlIAK0mjE0fWCZhcIKjYWECYXN4QDEzMjNkM2Q0NzA3YTU4YWQyZTIzYWRhNGU5ZjFmNDlmNWE1YjRhYzdiNzA4ZWIwZDYxZjczOGY0ODMwN2U4ZWVhY1ghAjRWqhENhLSsdHrr2Cw7AFrKUL9Ffr1XN6RBT6w659lNo2FhAWFzeEA1NmJjYmNiYjdjYzY0MDZiM2ZhNWQ1N2QyMTc0ZjRlZmY4YjQ0MDJiMTc2OTI2ZDNhNTdkM2MzZGNiYjU5ZDU3YWNYIQJzEpxXGeWZN5qXSmJjY8MzxWyvwObQGr5G1YCCgHicY2FtdWh0dHA6Ly9sb2NhbGhvc3Q6MzMzOGF1Y3NhdA=="
    assert TokenV4.parse_obj(token_dict).serialize() == expected
def test_calculate_number_of_blank_outputs(): def test_calculate_number_of_blank_outputs():
# Example from NUT-08 specification. # Example from NUT-08 specification.
fee_reserve_sat = 1000 fee_reserve_sat = 1000

View File

@@ -1,12 +1,10 @@
import asyncio import asyncio
import base64
import json
from typing import Tuple from typing import Tuple
import pytest import pytest
from click.testing import CliRunner from click.testing import CliRunner
from cashu.core.base import TokenV3 from cashu.core.base import TokenV4
from cashu.core.settings import settings from cashu.core.settings import settings
from cashu.wallet.cli.cli import cli from cashu.wallet.cli.cli import cli
from cashu.wallet.wallet import Wallet from cashu.wallet.wallet import Wallet
@@ -378,8 +376,8 @@ def test_send(mint, cli_prefix):
assert result.exception is None assert result.exception is None
print("test_send", result.output) print("test_send", result.output)
token_str = result.output.split("\n")[0] token_str = result.output.split("\n")[0]
assert "cashuA" in token_str, "output does not have a token" assert "cashuB" in token_str, "output does not have a token"
token = TokenV3.deserialize(token_str) token = TokenV4.deserialize(token_str).to_tokenv3()
assert token.token[0].proofs[0].dleq is None, "dleq included" assert token.token[0].proofs[0].dleq is None, "dleq included"
@@ -392,8 +390,8 @@ def test_send_with_dleq(mint, cli_prefix):
assert result.exception is None assert result.exception is None
print("test_send_with_dleq", result.output) print("test_send_with_dleq", result.output)
token_str = result.output.split("\n")[0] token_str = result.output.split("\n")[0]
assert "cashuA" in token_str, "output does not have a token" assert "cashuB" in token_str, "output does not have a token"
token = TokenV3.deserialize(token_str) token = TokenV4.deserialize(token_str).to_tokenv3()
assert token.token[0].proofs[0].dleq is not None, "no dleq included" assert token.token[0].proofs[0].dleq is not None, "no dleq included"
@@ -406,8 +404,8 @@ def test_send_legacy(mint, cli_prefix):
assert result.exception is None assert result.exception is None
print("test_send_legacy", result.output) print("test_send_legacy", result.output)
# this is the legacy token in the output # this is the legacy token in the output
token_str = result.output.split("\n")[4] token_str = result.output.split("\n")[0]
assert token_str.startswith("eyJwcm9v"), "output is not as expected" assert token_str.startswith("cashuAey"), "output is not as expected"
def test_send_offline(mint, cli_prefix): def test_send_offline(mint, cli_prefix):
@@ -419,7 +417,7 @@ def test_send_offline(mint, cli_prefix):
assert result.exception is None assert result.exception is None
print("SEND") print("SEND")
print("test_send_without_split", result.output) print("test_send_without_split", result.output)
assert "cashuA" in result.output, "output does not have a token" assert "cashuB" in result.output, "output does not have a token"
def test_send_too_much(mint, cli_prefix): def test_send_too_much(mint, cli_prefix):
@@ -447,113 +445,6 @@ def test_receive_tokenv3(mint, cli_prefix):
print(result.output) print(result.output)
def test_receive_tokenv3_no_mint(mint, cli_prefix):
    """Receive a V3 token that carries no mint URL.

    NOTE(review): depends on a previous test having run — the wallet must
    already know this keyset (and its mint URL) from its database, since the
    token itself does not name a mint.
    """
    proofs = [
        {
            "id": "009a1f293253e41e",
            "amount": 2,
            "secret": "ea3420987e1ecd71de58e4ff00e8a94d1f1f9333dad98e923e3083d21bf314e2",
            "C": "0204eb99cf27105b4de4029478376d6f71e9e3d5af1cc28a652c028d1bcd6537cc",
        },
        {
            "id": "009a1f293253e41e",
            "amount": 8,
            "secret": "3447975db92f43b269290e05b91805df7aa733f622e55d885a2cab78e02d4a72",
            "C": "0286c78750d414bc067178cbac0f3551093cea47d213ebf356899c972448ee6255",
        },
    ]
    token_dict = {"token": [{"proofs": proofs}]}
    # V3 tokens are the "cashuA" prefix plus base64-encoded JSON
    token = "cashuA" + base64.b64encode(json.dumps(token_dict).encode()).decode()
    print("RECEIVE")
    print(token)
    runner = CliRunner()
    result = runner.invoke(cli, [*cli_prefix, "receive", token])
    assert result.exception is None
    print(result.output)
def test_receive_tokenv2(mint, cli_prefix):
    """Receive a legacy V2 token (proofs plus a mint list, base64-encoded JSON)."""
    runner = CliRunner()
    token_dict = {
        "proofs": [
            {
                "id": "009a1f293253e41e",
                "amount": 2,
                "secret": "a1efb610726b342aec209375397fee86a0b88732779ce218e99132f9a975db2a",
                "C": "03057e5fe352bac785468ffa51a1ecf0f75af24d2d27ab1fd00164672a417d9523",
            },
            {
                "id": "009a1f293253e41e",
                "amount": 8,
                "secret": "b065a17938bc79d6224dc381873b8b7f3a46267e8b00d9ce59530354d9d81ae4",
                "C": "021e83773f5eb66f837a5721a067caaa8d7018ef0745b4302f4e2c6cac8806dc69",
            },
        ],
        "mints": [{"url": "http://localhost:3337", "ids": ["009a1f293253e41e"]}],
    }
    # V2 tokens are plain base64-encoded JSON with no version prefix
    encoded = base64.b64encode(json.dumps(token_dict).encode()).decode()
    result = runner.invoke(cli, [*cli_prefix, "receive", encoded])
    assert result.exception is None
    print("RECEIVE")
    print(result.output)
def test_receive_tokenv1(mint, cli_prefix):
    """Receive a legacy V1 token (a bare base64-encoded list of proofs)."""
    proofs = [
        {
            "id": "009a1f293253e41e",
            "amount": 2,
            "secret": "bc0360c041117969ef7b8add48d0981c669619aa5743cccce13d4a771c9e164d",
            "C": "026fd492f933e9240f36fb2559a7327f47b3441b895a5f8f0b1d6825fee73438f0",
        },
        {
            "id": "009a1f293253e41e",
            "amount": 8,
            "secret": "cf83bd8df35bb104d3818511c1653e9ebeb2b645a36fd071b2229aa2c3044acd",
            "C": "0279606f3dfd7784757c6320b17e1bf2211f284318814c12bfaa40680e017abd34",
        },
    ]
    # V1 tokens are just the proof list, JSON-serialized and base64-encoded
    encoded = base64.b64encode(json.dumps(proofs).encode()).decode()
    runner = CliRunner()
    result = runner.invoke(cli, [*cli_prefix, "receive", encoded])
    assert result.exception is None
    print("RECEIVE")
    print(result.output)
def test_nostr_send(mint, cli_prefix): def test_nostr_send(mint, cli_prefix):
runner = CliRunner() runner = CliRunner()
result = runner.invoke( result = runner.invoke(