Mirror of https://github.com/aljazceru/python-teos.git, synced 2025-12-18 06:34:19 +01:00
Reformats code to match code guidelines
@@ -1,6 +1,4 @@
|
||||
import pytest
|
||||
import responses
|
||||
import requests
|
||||
import json
|
||||
from binascii import hexlify
|
||||
|
||||
@@ -9,7 +7,6 @@ from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import ec
|
||||
|
||||
import apps.cli.pisa_cli as pisa_cli
|
||||
from apps.cli import PISA_PUBLIC_KEY
|
||||
from test.unit.conftest import get_random_value_hex
|
||||
|
||||
# TODO: should find a way of doing without this
|
||||
|
||||
@@ -13,8 +13,8 @@ from test.simulator.zmq_publisher import ZMQPublisher
|
||||
from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT
|
||||
|
||||
app = Flask(__name__)
|
||||
HOST = 'localhost'
|
||||
PORT = '18443'
|
||||
HOST = "localhost"
|
||||
PORT = "18443"
|
||||
|
||||
blockchain = []
|
||||
blocks = {}
|
||||
@@ -24,20 +24,20 @@ mempool = []
|
||||
mine_new_block = Event()
|
||||
|
||||
TIME_BETWEEN_BLOCKS = 5
|
||||
GENESIS_PARENT = '0000000000000000000000000000000000000000000000000000000000000000'
|
||||
GENESIS_PARENT = "0000000000000000000000000000000000000000000000000000000000000000"
|
||||
prev_block_hash = GENESIS_PARENT
@app.route('/generate', methods=['POST'])
|
||||
|
||||
@app.route("/generate", methods=["POST"])
|
||||
def generate():
|
||||
global mine_new_block
|
||||
|
||||
mine_new_block.set()
|
||||
|
||||
return Response(status=200, mimetype='application/json')
|
||||
return Response(status=200, mimetype="application/json")
@app.route('/fork', methods=['POST'])
|
||||
@app.route("/fork", methods=["POST"])
|
||||
def create_fork():
|
||||
"""
|
||||
create_fork processes chain fork requests. It will create a fork with the following parameters:
|
||||
@@ -67,10 +67,10 @@ def create_fork():
|
||||
# block heights and blockchain length are currently incorrect. It does the trick to test forks, but should
|
||||
# be fixed for better testing.
|
||||
|
||||
return Response(json.dumps(response), status=200, mimetype='application/json')
|
||||
return Response(json.dumps(response), status=200, mimetype="application/json")
@app.route('/', methods=['POST'])
|
||||
@app.route("/", methods=["POST"])
|
||||
def process_request():
|
||||
"""
|
||||
process_requests simulates the bitcoin-rpc server run by bitcoind. The available commands are limited to the ones
|
||||
@@ -108,7 +108,7 @@ def process_request():
|
||||
|
||||
global mempool
|
||||
request_data = request.get_json()
|
||||
method = request_data.get('method')
|
||||
method = request_data.get("method")
|
||||
|
||||
response = {"id": 0, "result": 0, "error": None}
|
||||
no_param_err = {"code": RPC_MISC_ERROR, "message": "JSON value is not a {} as expected"}
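The hunk above shows how the simulated RPC server dispatches on the "method" field of a bitcoind-style JSON-RPC request and answers with an {"id", "result", "error"} object. As a rough sketch of what the tests exercise, such a request could also be sent to the simulator directly; the "params" key and the localhost:18443 endpoint are assumptions based on the get_param helper and the HOST/PORT constants shown earlier, not something this diff spells out.

import requests

# Hedged sketch: assumes the simulator from this diff is already running on
# localhost:18443 and that get_param() reads a "params" list from the payload.
payload = {"id": 0, "method": "getblockcount", "params": []}
r = requests.post("http://localhost:18443/", json=payload)
print(r.json())  # expected shape: {"id": 0, "result": <block count>, "error": None}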
@@ -142,8 +142,10 @@ def process_request():
|
||||
response["result"] = {"txid": txid}
|
||||
|
||||
else:
|
||||
response["error"] = {"code": RPC_VERIFY_ALREADY_IN_CHAIN,
|
||||
"message": "Transaction already in block chain"}
|
||||
response["error"] = {
|
||||
"code": RPC_VERIFY_ALREADY_IN_CHAIN,
|
||||
"message": "Transaction already in block chain",
|
||||
}
|
||||
|
||||
else:
|
||||
response["error"] = {"code": RPC_DESERIALIZATION_ERROR, "message": "TX decode failed"}
|
||||
@@ -158,16 +160,18 @@ def process_request():
|
||||
if isinstance(txid, str):
|
||||
if txid in mined_transactions:
|
||||
block = blocks.get(mined_transactions[txid]["block"])
|
||||
rawtx = mined_transactions[txid].get('tx')
|
||||
response["result"] = {"hex": rawtx, "confirmations": len(blockchain) - block.get('height')}
|
||||
rawtx = mined_transactions[txid].get("tx")
|
||||
response["result"] = {"hex": rawtx, "confirmations": len(blockchain) - block.get("height")}
|
||||
|
||||
elif txid in mempool:
|
||||
response["result"] = {"confirmations": 0}
|
||||
|
||||
else:
|
||||
response["error"] = {'code': RPC_INVALID_ADDRESS_OR_KEY,
|
||||
'message': 'No such mempool or blockchain transaction. Use gettransaction for '
|
||||
'wallet transactions.'}
|
||||
response["error"] = {
|
||||
"code": RPC_INVALID_ADDRESS_OR_KEY,
|
||||
"message": "No such mempool or blockchain transaction. Use gettransaction for "
|
||||
"wallet transactions.",
|
||||
}
|
||||
else:
|
||||
response["error"] = no_param_err
|
||||
response["error"]["message"] = response["error"]["message"].format("string")
|
||||
@@ -219,7 +223,7 @@ def process_request():
|
||||
else:
|
||||
return abort(404, "Method not found")
|
||||
|
||||
return Response(json.dumps(response), status=200, mimetype='application/json')
|
||||
return Response(json.dumps(response), status=200, mimetype="application/json")
def get_param(request_data):
|
||||
@@ -240,8 +244,9 @@ def load_data():
|
||||
def simulate_mining(mode, time_between_blocks):
|
||||
global mempool, mined_transactions, blocks, blockchain, mine_new_block, prev_block_hash
|
||||
|
||||
mining_simulator = ZMQPublisher(topic=b'hashblock', feed_protocol=FEED_PROTOCOL, feed_addr=FEED_ADDR,
|
||||
feed_port=FEED_PORT)
|
||||
mining_simulator = ZMQPublisher(
|
||||
topic=b"hashblock", feed_protocol=FEED_PROTOCOL, feed_addr=FEED_ADDR, feed_port=FEED_PORT
|
||||
)
|
||||
|
||||
# Set the mining event to initialize the blockchain with a block
|
||||
mine_new_block.set()
|
||||
@@ -266,8 +271,12 @@ def simulate_mining(mode, time_between_blocks):
|
||||
mined_transactions[txid] = {"tx": tx, "block": block_hash}
|
||||
|
||||
# FIXME: chain_work is being defined as a incremental counter for now. Multiple chains should be possible.
|
||||
blocks[block_hash] = {"tx": list(txs_to_mine.keys()), "height": len(blockchain), "previousblockhash": prev_block_hash,
|
||||
"chainwork": '{:x}'.format(len(blockchain))}
|
||||
blocks[block_hash] = {
|
||||
"tx": list(txs_to_mine.keys()),
|
||||
"height": len(blockchain),
|
||||
"previousblockhash": prev_block_hash,
|
||||
"chainwork": "{:x}".format(len(blockchain)),
|
||||
}
|
||||
|
||||
mining_simulator.publish_data(binascii.unhexlify(block_hash))
|
||||
blockchain.append(block_hash)
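Right above, each mined block hash is unhexlified and pushed out through the ZMQPublisher created earlier on the b"hashblock" topic. For orientation only, a pyzmq subscriber to that feed could look like the sketch below; the tcp endpoint and the multipart framing are assumptions (they depend on FEED_PROTOCOL/FEED_ADDR/FEED_PORT from pisa.conf and on ZMQPublisher's internals), not something this diff specifies.

import zmq

# Hedged sketch of a consumer for the hashblock feed published by the simulator.
ctx = zmq.Context()
sub = ctx.socket(zmq.SUB)
sub.setsockopt(zmq.SUBSCRIBE, b"hashblock")
sub.connect("tcp://127.0.0.1:28332")  # assumed FEED_PROTOCOL://FEED_ADDR:FEED_PORT
frames = sub.recv_multipart()         # blocks until the simulator publishes a block
print(frames)                         # expected to contain b"hashblock" plus the raw 32-byte hash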
@@ -276,22 +285,22 @@ def simulate_mining(mode, time_between_blocks):
|
||||
print("New block mined: {}".format(block_hash))
|
||||
print("\tTransactions: {}".format(list(txs_to_mine.keys())))
|
||||
|
||||
if mode == 'time':
|
||||
if mode == "time":
|
||||
time.sleep(time_between_blocks)
|
||||
|
||||
else:
|
||||
mine_new_block.clear()
|
||||
|
||||
|
||||
def run_simulator(mode='time', time_between_blocks=TIME_BETWEEN_BLOCKS):
|
||||
if mode not in ["time", 'event']:
|
||||
|
||||
def run_simulator(mode="time", time_between_blocks=TIME_BETWEEN_BLOCKS):
|
||||
if mode not in ["time", "event"]:
|
||||
raise ValueError("Mode must be time or event")
|
||||
|
||||
mining_thread = Thread(target=simulate_mining, args=[mode, time_between_blocks])
|
||||
mining_thread.start()
|
||||
|
||||
# Setting Flask log to ERROR only so it does not mess with our logging. Also disabling flask initial messages
|
||||
logging.getLogger('werkzeug').setLevel(logging.ERROR)
|
||||
os.environ['WERKZEUG_RUN_MAIN'] = 'true'
|
||||
logging.getLogger("werkzeug").setLevel(logging.ERROR)
|
||||
os.environ["WERKZEUG_RUN_MAIN"] = "true"
|
||||
|
||||
app.run(host=HOST, port=PORT)
|
||||
|
||||
@@ -9,12 +9,12 @@ from test.simulator.bitcoind_sim import run_simulator
|
||||
from pisa.utils.auth_proxy import AuthServiceProxy, JSONRPCException
|
||||
from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT
|
||||
|
||||
MIXED_VALUES = values = [-1, 500, '', '111', [], 1.1, None, '', "a" * 31, "b" * 33, get_random_value_hex(32)]
|
||||
MIXED_VALUES = values = [-1, 500, "", "111", [], 1.1, None, "", "a" * 31, "b" * 33, get_random_value_hex(32)]
|
||||
|
||||
bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT))
@pytest.fixture(scope='module')
|
||||
@pytest.fixture(scope="module")
|
||||
def run_bitcoind():
|
||||
bitcoind_thread = Thread(target=run_simulator, kwargs={"mode": "event"})
|
||||
bitcoind_thread.daemon = True
|
||||
@@ -31,19 +31,20 @@ def genesis_block_hash(run_bitcoind):
|
||||
|
||||
def check_hash_format(txid):
|
||||
# TODO: #12-check-txid-regexp
|
||||
return isinstance(txid, str) and re.search(r'^[0-9A-Fa-f]{64}$', txid) is not None
|
||||
return isinstance(txid, str) and re.search(r"^[0-9A-Fa-f]{64}$", txid) is not None
|
||||
|
||||
|
||||
def test_help(run_bitcoind):
|
||||
# Help should always return 0
|
||||
assert(bitcoin_cli.help() == 0)
|
||||
assert bitcoin_cli.help() == 0
|
||||
|
||||
|
||||
# FIXME: Better assert for the exceptions would be nice (check the returned errno is the expected one)
|
||||
|
||||
|
||||
def test_getblockhash(genesis_block_hash):
|
||||
# First block
|
||||
assert(check_hash_format(genesis_block_hash))
|
||||
assert check_hash_format(genesis_block_hash)
|
||||
|
||||
# Check that the values are within range and of the proper format (all should fail)
|
||||
for v in MIXED_VALUES:
|
||||
@@ -57,9 +58,9 @@ def test_getblockhash(genesis_block_hash):
|
||||
def test_get_block(genesis_block_hash):
|
||||
# getblock should return a list of transactions and the height
|
||||
block = bitcoin_cli.getblock(genesis_block_hash)
|
||||
assert(isinstance(block.get('tx'), list))
|
||||
assert(len(block.get('tx')) != 0)
|
||||
assert(isinstance(block.get('height'), int))
|
||||
assert isinstance(block.get("tx"), list)
|
||||
assert len(block.get("tx")) != 0
|
||||
assert isinstance(block.get("height"), int)
|
||||
|
||||
# It should fail for wrong data formats and random ids
|
||||
for v in MIXED_VALUES:
|
||||
@@ -73,21 +74,21 @@ def test_get_block(genesis_block_hash):
|
||||
def test_decoderawtransaction(genesis_block_hash):
|
||||
# decoderawtransaction should only return if the given transaction matches a txid format
|
||||
block = bitcoin_cli.getblock(genesis_block_hash)
|
||||
coinbase_txid = block.get('tx')[0]
|
||||
coinbase_txid = block.get("tx")[0]
|
||||
|
||||
coinbase_tx = bitcoin_cli.getrawtransaction(coinbase_txid).get("hex")
|
||||
tx = bitcoin_cli.decoderawtransaction(coinbase_tx)
|
||||
|
||||
assert(isinstance(tx, dict))
|
||||
assert(isinstance(tx.get('txid'), str))
|
||||
assert(check_hash_format(tx.get('txid')))
|
||||
assert isinstance(tx, dict)
|
||||
assert isinstance(tx.get("txid"), str)
|
||||
assert check_hash_format(tx.get("txid"))
|
||||
|
||||
# Therefore should also work for a random transaction hex in our simulation
|
||||
random_tx = TX.create_dummy_transaction()
|
||||
tx = bitcoin_cli.decoderawtransaction(random_tx)
|
||||
assert(isinstance(tx, dict))
|
||||
assert(isinstance(tx.get('txid'), str))
|
||||
assert(check_hash_format(tx.get('txid')))
|
||||
assert isinstance(tx, dict)
|
||||
assert isinstance(tx.get("txid"), str)
|
||||
assert check_hash_format(tx.get("txid"))
|
||||
|
||||
# But it should fail for an improperly formatted one
|
||||
for v in MIXED_VALUES:
|
||||
@@ -124,8 +125,8 @@ def test_getrawtransaction(genesis_block_hash):
|
||||
genesis_tx = bitcoin_cli.getblock(genesis_block_hash).get("tx")[0]
|
||||
tx = bitcoin_cli.getrawtransaction(genesis_tx)
|
||||
|
||||
assert(isinstance(tx, dict))
|
||||
assert(isinstance(tx.get('confirmations'), int))
|
||||
assert isinstance(tx, dict)
|
||||
assert isinstance(tx.get("confirmations"), int)
|
||||
|
||||
for v in MIXED_VALUES:
|
||||
try:
|
||||
@@ -138,9 +139,5 @@ def test_getrawtransaction(genesis_block_hash):
|
||||
def test_getblockcount():
|
||||
# getblockcount should always return a positive integer
|
||||
bc = bitcoin_cli.getblockcount()
|
||||
assert (isinstance(bc, int))
|
||||
assert (bc >= 0)
assert isinstance(bc, int)
|
||||
assert bc >= 0
|
||||
@@ -137,14 +137,16 @@ class TX:
|
||||
tx.prev_out_index = [prev_out_index]
|
||||
tx.nLockTime = 0
|
||||
tx.scriptSig = [
|
||||
'47304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860'
|
||||
'a4acdd12909d831cc56cbbac4622082221a8768d1d0901']
|
||||
"47304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860"
|
||||
"a4acdd12909d831cc56cbbac4622082221a8768d1d0901"
|
||||
]
|
||||
tx.scriptSig_len = [77]
|
||||
tx.nSequence = [4294967295]
|
||||
tx.value = [5000000000]
|
||||
tx.scriptPubKey = [
|
||||
'4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c'
|
||||
'1b7303b8a0626f1baded5c72a704f7e6cd84cac']
|
||||
"4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c"
|
||||
"1b7303b8a0626f1baded5c72a704f7e6cd84cac"
|
||||
]
|
||||
tx.scriptPubKey_len = [67]
|
||||
|
||||
return tx.serialize()
|
||||
|
||||
@@ -17,7 +17,7 @@ def change_endianness(x):
|
||||
|
||||
y = unhexlify(x)
|
||||
z = y[::-1]
|
||||
return hexlify(z).decode('utf-8')
|
||||
return hexlify(z).decode("utf-8")
|
||||
|
||||
|
||||
def parse_varint(tx):
|
||||
@@ -30,10 +30,10 @@ def parse_varint(tx):
|
||||
|
||||
# First of all, the offset of the hex transaction is moved to the proper position (i.e. where the varint should be
|
||||
# located) and the length and format of the data to be analyzed are checked.
|
||||
data = tx.hex[tx.offset:]
|
||||
assert (len(data) > 0)
|
||||
data = tx.hex[tx.offset :]
|
||||
assert len(data) > 0
|
||||
size = int(data[:2], 16)
|
||||
assert (size <= 255)
|
||||
assert size <= 255
|
||||
|
||||
# Then, the integer is encoded as a varint using the proper prefix, if needed.
|
||||
if size <= 252: # No prefix
|
||||
@@ -49,7 +49,7 @@ def parse_varint(tx):
|
||||
|
||||
# Finally, the storage length is used to extract the proper number of bytes from the transaction hex and the
|
||||
# transaction offset is updated.
|
||||
varint = data[:storage_length * 2]
|
||||
varint = data[: storage_length * 2]
|
||||
tx.offset += storage_length * 2
|
||||
|
||||
return varint
|
||||
@@ -65,7 +65,7 @@ def parse_element(tx, size):
|
||||
:rtype: hex str
|
||||
"""
|
||||
|
||||
element = tx.hex[tx.offset:tx.offset + size * 2]
|
||||
element = tx.hex[tx.offset : tx.offset + size * 2]
|
||||
tx.offset += size * 2
|
||||
return element
|
||||
|
||||
@@ -97,7 +97,7 @@ def encode_varint(value):
|
||||
prefix = 255 # 0xFF
|
||||
else:
|
||||
raise Exception("Wrong input data size")
|
||||
varint = format(prefix, 'x') + change_endianness(int2bytes(value, size))
|
||||
varint = format(prefix, "x") + change_endianness(int2bytes(value, size))
|
||||
|
||||
return varint
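parse_varint and encode_varint above implement Bitcoin's CompactSize integers; the diff only shows fragments of the branch logic, so here is a self-contained sketch of the layout they follow (the 1/2/4/8-byte sizes and the 0xFD/0xFE/0xFF prefixes are the standard rules, assumed rather than quoted from the elided lines).

from binascii import hexlify, unhexlify

def compactsize(value):
    # Standard CompactSize layout: <= 0xFC is one byte, otherwise a prefix byte
    # (0xFD/0xFE/0xFF) followed by 2/4/8 bytes little-endian.
    if value <= 0xFC:
        return "{:02x}".format(value)
    for prefix, size in (("fd", 2), ("fe", 4), ("ff", 8)):
        if value < 2 ** (8 * size):
            be = ("%0" + str(2 * size) + "x") % value   # same trick as int2bytes above
            return prefix + hexlify(unhexlify(be)[::-1]).decode("utf-8")
    raise ValueError("value too large for a CompactSize")

assert compactsize(252) == "fc"
assert compactsize(500) == "fdf401"  # what encode_varint(500) should also produce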
@@ -112,12 +112,13 @@ def int2bytes(a, b):
|
||||
:rtype: hex str
|
||||
"""
|
||||
|
||||
m = pow(2, 8*b) - 1
|
||||
m = pow(2, 8 * b) - 1
|
||||
if a > m:
|
||||
raise Exception(str(a) + " is too big to be represented with " + str(b) + " bytes. Maximum value is "
|
||||
+ str(m) + ".")
|
||||
raise Exception(
|
||||
str(a) + " is too big to be represented with " + str(b) + " bytes. Maximum value is " + str(m) + "."
|
||||
)
|
||||
|
||||
return ('%0' + str(2 * b) + 'x') % a
|
||||
return ("%0" + str(2 * b) + "x") % a
|
||||
|
||||
|
||||
def sha256d(hex_data):
|
||||
@@ -125,4 +126,3 @@ def sha256d(hex_data):
|
||||
double_sha256 = sha256(sha256(data).digest()).hexdigest()
|
||||
|
||||
return change_endianness(double_sha256)
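sha256d above is the usual double SHA-256 with the result byte-reversed, which is the convention bitcoind uses when displaying txids and block hashes. A standalone illustration of the same two steps (the 4-byte input is just a made-up example):

from binascii import hexlify, unhexlify
from hashlib import sha256

data = unhexlify("01000000")                                       # hypothetical input
big_endian = sha256(sha256(data).digest()).hexdigest()             # double SHA-256
txid_style = hexlify(unhexlify(big_endian)[::-1]).decode("utf-8")  # change_endianness step
print(big_endian, txid_style)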
@@ -20,7 +20,7 @@ from test.simulator.transaction import TX
|
||||
from test.simulator.bitcoind_sim import run_simulator, HOST, PORT
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
@pytest.fixture(scope="session")
|
||||
def run_bitcoind():
|
||||
bitcoind_thread = Thread(target=run_simulator, kwargs={"mode": "event"})
|
||||
bitcoind_thread.daemon = True
|
||||
@@ -30,7 +30,7 @@ def run_bitcoind():
|
||||
sleep(0.1)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
@pytest.fixture(scope="session")
|
||||
def run_api():
|
||||
db_manager = DBManager(DB_PATH)
|
||||
watcher = Watcher(db_manager)
|
||||
@@ -43,23 +43,23 @@ def run_api():
|
||||
sleep(0.1)
|
||||
|
||||
|
||||
@pytest.fixture(scope='session', autouse=True)
|
||||
@pytest.fixture(scope="session", autouse=True)
|
||||
def prng_seed():
|
||||
random.seed(0)
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
@pytest.fixture(scope="module")
|
||||
def db_manager():
|
||||
manager = DBManager('test_db')
|
||||
manager = DBManager("test_db")
|
||||
yield manager
|
||||
|
||||
manager.db.close()
|
||||
rmtree('test_db')
|
||||
rmtree("test_db")
|
||||
|
||||
|
||||
def get_random_value_hex(nbytes):
|
||||
pseudo_random_value = random.getrandbits(8 * nbytes)
|
||||
prv_hex = '{:x}'.format(pseudo_random_value)
|
||||
prv_hex = "{:x}".format(pseudo_random_value)
|
||||
return prv_hex.zfill(2 * nbytes)
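get_random_value_hex above relies on zfill because random.getrandbits can return a value whose hex representation is shorter than 2 * nbytes characters (leading zero bytes); the padding keeps locators and txids at a fixed width. A quick check of that property:

import random

random.seed(0)  # same seeding idea as the prng_seed fixture above
value = random.getrandbits(8 * 32)
padded = "{:x}".format(value).zfill(64)
assert len(padded) == 64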
@@ -80,8 +80,13 @@ def generate_dummy_appointment_data(start_time_offset=5, end_time_offset=30):
|
||||
dispute_txid = sha256d(dispute_tx)
|
||||
justice_tx = TX.create_dummy_transaction(dispute_txid)
|
||||
|
||||
dummy_appointment_data = {"tx": justice_tx, "tx_id": dispute_txid, "start_time": current_height + start_time_offset,
|
||||
"end_time": current_height + end_time_offset, "dispute_delta": 20}
|
||||
dummy_appointment_data = {
|
||||
"tx": justice_tx,
|
||||
"tx_id": dispute_txid,
|
||||
"start_time": current_height + start_time_offset,
|
||||
"end_time": current_height + end_time_offset,
|
||||
"dispute_delta": 20,
|
||||
}
|
||||
|
||||
cipher = "AES-GCM-128"
|
||||
hash_function = "SHA256"
|
||||
@@ -91,18 +96,24 @@ def generate_dummy_appointment_data(start_time_offset=5, end_time_offset=30):
|
||||
|
||||
encrypted_blob = blob.encrypt((dummy_appointment_data.get("tx_id")))
|
||||
|
||||
appointment_data = {"locator": locator, "start_time": dummy_appointment_data.get("start_time"),
|
||||
"end_time": dummy_appointment_data.get("end_time"),
|
||||
"dispute_delta": dummy_appointment_data.get("dispute_delta"),
|
||||
"encrypted_blob": encrypted_blob, "cipher": cipher, "hash_function": hash_function,
|
||||
"triggered": False}
|
||||
appointment_data = {
|
||||
"locator": locator,
|
||||
"start_time": dummy_appointment_data.get("start_time"),
|
||||
"end_time": dummy_appointment_data.get("end_time"),
|
||||
"dispute_delta": dummy_appointment_data.get("dispute_delta"),
|
||||
"encrypted_blob": encrypted_blob,
|
||||
"cipher": cipher,
|
||||
"hash_function": hash_function,
|
||||
"triggered": False,
|
||||
}
|
||||
|
||||
return appointment_data, dispute_tx
def generate_dummy_appointment(start_time_offset=5, end_time_offset=30):
|
||||
appointment_data, dispute_tx = generate_dummy_appointment_data(start_time_offset=start_time_offset,
|
||||
end_time_offset=end_time_offset)
|
||||
appointment_data, dispute_tx = generate_dummy_appointment_data(
|
||||
start_time_offset=start_time_offset, end_time_offset=end_time_offset
|
||||
)
|
||||
|
||||
return Appointment.from_dict(appointment_data), dispute_tx
|
||||
|
||||
@@ -112,7 +123,8 @@ def generate_dummy_job():
|
||||
justice_txid = get_random_value_hex(32)
|
||||
justice_rawtx = get_random_value_hex(100)
|
||||
|
||||
job_data = dict(dispute_txid=dispute_txid, justice_txid=justice_txid, justice_rawtx=justice_rawtx,
|
||||
appointment_end=100)
|
||||
job_data = dict(
|
||||
dispute_txid=dispute_txid, justice_txid=justice_txid, justice_rawtx=justice_rawtx, appointment_end=100
|
||||
)
|
||||
|
||||
return Job.from_dict(job_data)
|
||||
|
||||
@@ -36,22 +36,22 @@ def add_appointment(appointment):
|
||||
def test_add_appointment(run_api, run_bitcoind, new_appointment):
|
||||
# Properly formatted appointment
|
||||
r = add_appointment(new_appointment)
|
||||
assert (r.status_code == 200)
|
||||
assert r.status_code == 200
|
||||
|
||||
# Incorrect appointment
|
||||
new_appointment["dispute_delta"] = 0
|
||||
r = add_appointment(new_appointment)
|
||||
assert (r.status_code == 400)
|
||||
assert r.status_code == 400
|
||||
|
||||
|
||||
def test_request_appointment(new_appointment):
|
||||
# First we need to add an appointment
|
||||
r = add_appointment(new_appointment)
|
||||
assert (r.status_code == 200)
|
||||
assert r.status_code == 200
|
||||
|
||||
# Next we can request it
|
||||
r = requests.get(url=PISA_API + "/get_appointment?locator=" + new_appointment["locator"])
|
||||
assert (r.status_code == 200)
|
||||
assert r.status_code == 200
|
||||
|
||||
# Each locator may point to multiple appointments, check them all
|
||||
received_appointments = json.loads(r.content)
|
||||
@@ -60,20 +60,20 @@ def test_request_appointment(new_appointment):
|
||||
appointment_status = [appointment.pop("status") for appointment in received_appointments]
|
||||
|
||||
# Check that the appointment is among the received appointments
|
||||
assert (new_appointment in received_appointments)
|
||||
assert new_appointment in received_appointments
|
||||
|
||||
# Check that all the appointments are being watched
|
||||
assert (all([status == "being_watched" for status in appointment_status]))
|
||||
assert all([status == "being_watched" for status in appointment_status])
|
||||
|
||||
|
||||
def test_request_random_appointment():
|
||||
r = requests.get(url=PISA_API + "/get_appointment?locator=" + get_random_value_hex(32))
|
||||
assert (r.status_code == 200)
|
||||
assert r.status_code == 200
|
||||
|
||||
received_appointments = json.loads(r.content)
|
||||
appointment_status = [appointment.pop("status") for appointment in received_appointments]
|
||||
|
||||
assert (all([status == "not_found" for status in appointment_status]))
|
||||
assert all([status == "not_found" for status in appointment_status])
|
||||
|
||||
|
||||
def test_add_appointment_multiple_times(new_appointment, n=MULTIPLE_APPOINTMENTS):
|
||||
@@ -81,29 +81,29 @@ def test_add_appointment_multiple_times(new_appointment, n=MULTIPLE_APPOINTMENTS
|
||||
# DISCUSS: #34-store-identical-appointments
|
||||
for _ in range(n):
|
||||
r = add_appointment(new_appointment)
|
||||
assert (r.status_code == 200)
|
||||
assert r.status_code == 200
|
||||
|
||||
|
||||
def test_request_multiple_appointments_same_locator(new_appointment, n=MULTIPLE_APPOINTMENTS):
|
||||
for _ in range(n):
|
||||
r = add_appointment(new_appointment)
|
||||
assert (r.status_code == 200)
|
||||
assert r.status_code == 200
|
||||
|
||||
test_request_appointment(new_appointment)
|
||||
|
||||
|
||||
def test_add_too_many_appointment(new_appointment):
|
||||
for _ in range(MAX_APPOINTMENTS-len(appointments)):
|
||||
for _ in range(MAX_APPOINTMENTS - len(appointments)):
|
||||
r = add_appointment(new_appointment)
|
||||
assert (r.status_code == 200)
|
||||
assert r.status_code == 200
|
||||
|
||||
r = add_appointment(new_appointment)
|
||||
assert (r.status_code == 503)
|
||||
assert r.status_code == 503
|
||||
|
||||
|
||||
def test_get_all_appointments_watcher():
|
||||
r = requests.get(url=PISA_API + "/get_all_appointments")
|
||||
assert (r.status_code == 200 and r.reason == 'OK')
|
||||
assert r.status_code == 200 and r.reason == "OK"
|
||||
|
||||
received_appointments = json.loads(r.content)
|
||||
|
||||
@@ -111,8 +111,8 @@ def test_get_all_appointments_watcher():
|
||||
watcher_locators = [v["locator"] for k, v in received_appointments["watcher_appointments"].items()]
|
||||
local_locators = [appointment["locator"] for appointment in appointments]
|
||||
|
||||
assert(set(watcher_locators) == set(local_locators))
|
||||
assert(len(received_appointments["responder_jobs"]) == 0)
|
||||
assert set(watcher_locators) == set(local_locators)
|
||||
assert len(received_appointments["responder_jobs"]) == 0
|
||||
|
||||
|
||||
def test_get_all_appointments_responder():
|
||||
@@ -138,5 +138,5 @@ def test_get_all_appointments_responder():
|
||||
watcher_appointments = [v["locator"] for k, v in received_appointments["watcher_appointments"].items()]
|
||||
print(set(watcher_appointments) == set(local_locators))
|
||||
|
||||
assert (set(responder_jobs) == set(local_locators))
|
||||
assert (len(received_appointments["watcher_appointments"]) == 0)
|
||||
assert set(responder_jobs) == set(local_locators)
|
||||
assert len(received_appointments["watcher_appointments"]) == 0
|
||||
|
||||
@@ -33,9 +33,15 @@ def test_init_appointment(appointment_data):
|
||||
|
||||
appointment = Appointment(locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function)
|
||||
|
||||
assert (locator == appointment.locator and start_time == appointment.start_time and end_time == appointment.end_time
|
||||
and EncryptedBlob(encrypted_blob_data) == appointment.encrypted_blob and cipher == appointment.cipher
|
||||
and dispute_delta == appointment.dispute_delta and hash_function == appointment.hash_function)
|
||||
assert (
|
||||
locator == appointment.locator
|
||||
and start_time == appointment.start_time
|
||||
and end_time == appointment.end_time
|
||||
and EncryptedBlob(encrypted_blob_data) == appointment.encrypted_blob
|
||||
and cipher == appointment.cipher
|
||||
and dispute_delta == appointment.dispute_delta
|
||||
and hash_function == appointment.hash_function
|
||||
)
|
||||
|
||||
|
||||
def test_to_dict(appointment_data):
|
||||
@@ -44,10 +50,15 @@ def test_to_dict(appointment_data):
|
||||
|
||||
dict_appointment = appointment.to_dict()
|
||||
|
||||
assert (locator == dict_appointment.get("locator") and start_time == dict_appointment.get("start_time")
|
||||
and end_time == dict_appointment.get("end_time") and dispute_delta == dict_appointment.get("dispute_delta")
|
||||
and cipher == dict_appointment.get("cipher") and hash_function == dict_appointment.get("hash_function")
|
||||
and encrypted_blob_data == dict_appointment.get("encrypted_blob"))
|
||||
assert (
|
||||
locator == dict_appointment.get("locator")
|
||||
and start_time == dict_appointment.get("start_time")
|
||||
and end_time == dict_appointment.get("end_time")
|
||||
and dispute_delta == dict_appointment.get("dispute_delta")
|
||||
and cipher == dict_appointment.get("cipher")
|
||||
and hash_function == dict_appointment.get("hash_function")
|
||||
and encrypted_blob_data == dict_appointment.get("encrypted_blob")
|
||||
)
|
||||
|
||||
|
||||
def test_to_json(appointment_data):
|
||||
@@ -56,7 +67,12 @@ def test_to_json(appointment_data):
|
||||
|
||||
dict_appointment = json.loads(appointment.to_json())
|
||||
|
||||
assert (locator == dict_appointment.get("locator") and start_time == dict_appointment.get("start_time")
|
||||
and end_time == dict_appointment.get("end_time") and dispute_delta == dict_appointment.get("dispute_delta")
|
||||
and cipher == dict_appointment.get("cipher") and hash_function == dict_appointment.get("hash_function")
|
||||
and encrypted_blob_data == dict_appointment.get("encrypted_blob"))
|
||||
assert (
|
||||
locator == dict_appointment.get("locator")
|
||||
and start_time == dict_appointment.get("start_time")
|
||||
and end_time == dict_appointment.get("end_time")
|
||||
and dispute_delta == dict_appointment.get("dispute_delta")
|
||||
and cipher == dict_appointment.get("cipher")
|
||||
and hash_function == dict_appointment.get("hash_function")
|
||||
and encrypted_blob_data == dict_appointment.get("encrypted_blob")
|
||||
)
|
||||
|
||||
@@ -18,7 +18,7 @@ def test_init_blob():
|
||||
|
||||
for case in cipher_cases:
|
||||
blob = Blob(data, case, hash_function)
|
||||
assert(blob.data == data and blob.cipher == case and blob.hash_function == hash_function)
|
||||
assert blob.data == data and blob.cipher == case and blob.hash_function == hash_function
|
||||
|
||||
# Fixed (valid) cipher, try different valid hash functions
|
||||
cipher = SUPPORTED_CIPHERS[0]
|
||||
@@ -27,7 +27,7 @@ def test_init_blob():
|
||||
|
||||
for case in hash_function_cases:
|
||||
blob = Blob(data, cipher, case)
|
||||
assert(blob.data == data and blob.cipher == cipher and blob.hash_function == case)
|
||||
assert blob.data == data and blob.cipher == cipher and blob.hash_function == case
|
||||
|
||||
# Invalid data
|
||||
data = unhexlify(get_random_value_hex(64))
|
||||
@@ -87,4 +87,4 @@ def test_encrypt():
|
||||
# Check that two encryptions of the same data have the same result
|
||||
encrypted_blob2 = blob.encrypt(key)
|
||||
|
||||
assert(encrypted_blob == encrypted_blob2 and id(encrypted_blob) != id(encrypted_blob2))
|
||||
assert encrypted_blob == encrypted_blob2 and id(encrypted_blob) != id(encrypted_blob2)
|
||||
|
||||
@@ -13,12 +13,12 @@ APPOINTMENT_COUNT = 100
|
||||
TEST_SET_SIZE = 200
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
@pytest.fixture(scope="module")
|
||||
def txids():
|
||||
return [get_random_value_hex(32) for _ in range(APPOINTMENT_COUNT)]
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
@pytest.fixture(scope="module")
|
||||
def locator_uuid_map(txids):
|
||||
return {sha256(unhexlify(txid)).hexdigest(): uuid4().hex for txid in txids}
|
||||
|
||||
@@ -40,7 +40,7 @@ def test_get_block(best_block_hash):
|
||||
# Checking that the received block has at least the fields we need
|
||||
# FIXME: We could be more strict here, but we'll need to add those restrictions to bitcoind_sim too
|
||||
assert isinstance(block, dict)
|
||||
assert block.get('hash') == best_block_hash and 'height' in block and 'previousblockhash' in block and 'tx' in block
|
||||
assert block.get("hash") == best_block_hash and "height" in block and "previousblockhash" in block and "tx" in block
|
||||
|
||||
|
||||
def test_get_random_block():
@@ -74,6 +74,3 @@ def test_build_block_queue():
|
||||
blocks.remove(block)
|
||||
|
||||
assert len(blocks) == 0
@@ -17,7 +17,7 @@ c_logger.disabled = True
|
||||
sent_txs = []
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
@pytest.fixture(scope="module")
|
||||
def carrier():
|
||||
return Carrier()
|
||||
|
||||
@@ -28,7 +28,7 @@ def test_send_transaction(run_bitcoind, carrier):
|
||||
|
||||
receipt = carrier.send_transaction(tx, txid)
|
||||
|
||||
assert(receipt.delivered is True)
|
||||
assert receipt.delivered is True
|
||||
|
||||
|
||||
def test_send_double_spending_transaction(carrier):
|
||||
@@ -47,9 +47,8 @@ def test_send_double_spending_transaction(carrier):
|
||||
|
||||
# The carrier should report delivered True for both, but in the second case the transaction was already delivered
|
||||
# (either by itself or by someone else)
|
||||
assert(receipt.delivered is True)
|
||||
assert (receipt2.delivered is True and receipt2.confirmations >= 1
|
||||
and receipt2.reason == RPC_VERIFY_ALREADY_IN_CHAIN)
|
||||
assert receipt.delivered is True
|
||||
assert receipt2.delivered is True and receipt2.confirmations >= 1 and receipt2.reason == RPC_VERIFY_ALREADY_IN_CHAIN
|
||||
|
||||
|
||||
def test_send_transaction_invalid_format(carrier):
|
||||
@@ -58,7 +57,7 @@ def test_send_transaction_invalid_format(carrier):
|
||||
txid = sha256d(tx)
|
||||
receipt = carrier.send_transaction(txid, txid)
|
||||
|
||||
assert (receipt.delivered is False and receipt.reason == RPC_DESERIALIZATION_ERROR)
|
||||
assert receipt.delivered is False and receipt.reason == RPC_DESERIALIZATION_ERROR
|
||||
|
||||
|
||||
def test_get_transaction():
|
||||
@@ -73,5 +72,3 @@ def test_get_non_existing_transaction():
|
||||
tx_info = Carrier.get_transaction(get_random_value_hex(32))
|
||||
|
||||
assert tx_info is None
@@ -9,12 +9,12 @@ from test.unit.conftest import get_random_value_hex, generate_dummy_appointment
|
||||
from pisa.db_manager import WATCHER_LAST_BLOCK_KEY, RESPONDER_LAST_BLOCK_KEY, LOCATOR_MAP_PREFIX
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
@pytest.fixture(scope="module")
|
||||
def watcher_appointments():
|
||||
return {uuid4().hex: generate_dummy_appointment()[0] for _ in range(10)}
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
@pytest.fixture(scope="module")
|
||||
def responder_jobs():
|
||||
return {get_random_value_hex(32): get_random_value_hex(32) for _ in range(10)}
|
||||
|
||||
@@ -31,7 +31,7 @@ def open_create_db(db_path):
|
||||
|
||||
|
||||
def test_init():
|
||||
db_path = 'init_test_db'
|
||||
db_path = "init_test_db"
|
||||
|
||||
# First we check if the db exists, and if so we delete it
|
||||
if os.path.isdir(db_path):
|
||||
@@ -57,7 +57,7 @@ def test_init():
|
||||
|
||||
def test_load_appointments_db(db_manager):
|
||||
# Let's make up a prefix and try to load data from the database using it
|
||||
prefix = 'XX'
|
||||
prefix = "XX"
|
||||
db_appointments = db_manager.load_appointments_db(prefix)
|
||||
|
||||
assert len(db_appointments) == 0
|
||||
@@ -69,7 +69,7 @@ def test_load_appointments_db(db_manager):
|
||||
value = get_random_value_hex(32)
|
||||
local_appointments[key] = value
|
||||
|
||||
db_manager.db.put((prefix+key).encode('utf-8'), json.dumps({'value': value}).encode('utf-8'))
|
||||
db_manager.db.put((prefix + key).encode("utf-8"), json.dumps({"value": value}).encode("utf-8"))
|
||||
|
||||
db_appointments = db_manager.load_appointments_db(prefix)
|
||||
|
||||
@@ -88,7 +88,7 @@ def test_get_last_known_block(db_manager):
|
||||
# After saving some block in the db we should get that exact value
|
||||
for key in [WATCHER_LAST_BLOCK_KEY, RESPONDER_LAST_BLOCK_KEY]:
|
||||
block_hash = get_random_value_hex(32)
|
||||
db_manager.db.put(key.encode('utf-8'), block_hash.encode('utf-8'))
|
||||
db_manager.db.put(key.encode("utf-8"), block_hash.encode("utf-8"))
|
||||
assert db_manager.get_last_known_block(key) == block_hash
|
||||
|
||||
|
||||
@@ -100,24 +100,24 @@ def test_create_entry(db_manager):
|
||||
db_manager.create_entry(key, value)
|
||||
|
||||
# We should be able to get it straightaway from the key
|
||||
assert db_manager.db.get(key.encode('utf-8')).decode('utf-8') == value
|
||||
assert db_manager.db.get(key.encode("utf-8")).decode("utf-8") == value
|
||||
|
||||
# If we prefix the key we should be able to get it if we add the prefix, but not otherwise
|
||||
key = get_random_value_hex(32)
|
||||
prefix = 'w'
|
||||
prefix = "w"
|
||||
db_manager.create_entry(key, value, prefix=prefix)
|
||||
|
||||
assert db_manager.db.get((prefix+key).encode('utf-8')).decode('utf-8') == value
|
||||
assert db_manager.db.get(key.encode('utf-8')) is None
|
||||
assert db_manager.db.get((prefix + key).encode("utf-8")).decode("utf-8") == value
|
||||
assert db_manager.db.get(key.encode("utf-8")) is None
|
||||
|
||||
# Same if we try to use any other prefix
|
||||
another_prefix = 'r'
|
||||
assert db_manager.db.get((another_prefix+key).encode('utf-8')) is None
|
||||
another_prefix = "r"
|
||||
assert db_manager.db.get((another_prefix + key).encode("utf-8")) is None
|
||||
|
||||
|
||||
def test_delete_entry(db_manager):
|
||||
# Let's first get the keys of all the things we've written so far in the db
|
||||
data = [k.decode('utf-8') for k, v in db_manager.db.iterator()]
|
||||
data = [k.decode("utf-8") for k, v in db_manager.db.iterator()]
|
||||
|
||||
# Let's empty the db now
|
||||
for key in data:
|
||||
@@ -132,11 +132,11 @@ def test_delete_entry(db_manager):
|
||||
db_manager.create_entry(key, value, prefix)
|
||||
|
||||
# Checks it's there
|
||||
assert db_manager.db.get((prefix + key).encode('utf-8')).decode('utf-8') == value
|
||||
assert db_manager.db.get((prefix + key).encode("utf-8")).decode("utf-8") == value
|
||||
|
||||
# And now it's gone
|
||||
db_manager.delete_entry(key, prefix)
|
||||
assert db_manager.db.get((prefix+key).encode('utf-8')) is None
|
||||
assert db_manager.db.get((prefix + key).encode("utf-8")) is None
|
||||
|
||||
|
||||
def test_load_watcher_appointments_empty(db_manager):
|
||||
@@ -172,14 +172,14 @@ def test_store_update_locator_map_empty(db_manager):
|
||||
|
||||
def test_delete_locator_map(db_manager):
|
||||
locator_maps = db_manager.load_appointments_db(prefix=LOCATOR_MAP_PREFIX)
|
||||
assert(len(locator_maps) != 0)
|
||||
assert len(locator_maps) != 0
|
||||
|
||||
for locator, uuids in locator_maps.items():
|
||||
print(locator)
|
||||
db_manager.delete_locator_map(locator)
|
||||
|
||||
locator_maps = db_manager.load_appointments_db(prefix=LOCATOR_MAP_PREFIX)
|
||||
assert (len(locator_maps) == 0)
|
||||
assert len(locator_maps) == 0
|
||||
|
||||
|
||||
def test_store_load_watcher_appointment(db_manager, watcher_appointments):
|
||||
@@ -200,7 +200,7 @@ def test_store_load_watcher_appointment(db_manager, watcher_appointments):
|
||||
|
||||
def test_store_load_appointment_jobs(db_manager, responder_jobs):
|
||||
for key, value in responder_jobs.items():
|
||||
db_manager.store_responder_job(key, json.dumps({'value': value}))
|
||||
db_manager.store_responder_job(key, json.dumps({"value": value}))
|
||||
|
||||
db_responder_jobs = db_manager.load_responder_jobs()
|
||||
|
||||
@@ -252,6 +252,3 @@ def test_store_load_last_block_hash_responder(db_manager):
|
||||
db_last_block_hash = db_manager.load_last_block_hash_responder()
|
||||
|
||||
assert local_last_block_hash == db_last_block_hash
@@ -8,7 +8,7 @@ c_logger.disabled = True
|
||||
def test_init_encrypted_blob():
|
||||
# Not much to test here, basically that the object is properly created
|
||||
data = get_random_value_hex(64)
|
||||
assert (EncryptedBlob(data).data == data)
|
||||
assert EncryptedBlob(data).data == data
|
||||
|
||||
|
||||
def test_decrypt():
|
||||
@@ -24,8 +24,8 @@ def test_decrypt():
|
||||
|
||||
# Valid data should run with no InvalidTag and verify
|
||||
data = "6097cdf52309b1b2124efeed36bd34f46dc1c25ad23ac86f28380f746254f777"
|
||||
key = 'b2e984a570f6f49bc38ace178e09147b0aa296cbb7c92eb01412f7e2d07b5659'
|
||||
key = "b2e984a570f6f49bc38ace178e09147b0aa296cbb7c92eb01412f7e2d07b5659"
|
||||
encrypted_data = "092e93d4a34aac4367075506f2c050ddfa1a201ee6669b65058572904dcea642aeb01ea4b57293618e8c46809dfadadc"
|
||||
encrypted_blob = EncryptedBlob(encrypted_data)
|
||||
|
||||
assert(encrypted_blob.decrypt(key) == data)
|
||||
assert encrypted_blob.decrypt(key) == data
|
||||
|
||||
@@ -13,38 +13,38 @@ c_logger.disabled = True
|
||||
inspector = Inspector()
|
||||
APPOINTMENT_OK = (0, None)
|
||||
|
||||
NO_HEX_STRINGS = ["R" * 64, get_random_value_hex(31) + "PP", "$"*64, " "*64]
|
||||
WRONG_TYPES = [[], '', get_random_value_hex(32), 3.2, 2.0, (), object, {}, " "*32, object()]
|
||||
NO_HEX_STRINGS = ["R" * 64, get_random_value_hex(31) + "PP", "$" * 64, " " * 64]
|
||||
WRONG_TYPES = [[], "", get_random_value_hex(32), 3.2, 2.0, (), object, {}, " " * 32, object()]
|
||||
WRONG_TYPES_NO_STR = [[], unhexlify(get_random_value_hex(32)), 3.2, 2.0, (), object, {}, object()]
|
||||
|
||||
|
||||
def test_check_locator():
|
||||
# Right appointment type, size and format
|
||||
locator = get_random_value_hex(32)
|
||||
assert(Inspector.check_locator(locator) == APPOINTMENT_OK)
|
||||
assert Inspector.check_locator(locator) == APPOINTMENT_OK
|
||||
|
||||
# Wrong size (too big)
|
||||
locator = get_random_value_hex(33)
|
||||
assert(Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE)
|
||||
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE
|
||||
|
||||
# Wrong size (too small)
|
||||
locator = get_random_value_hex(31)
|
||||
assert(Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE)
|
||||
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE
|
||||
|
||||
# Empty
|
||||
locator = None
|
||||
assert (Inspector.check_locator(locator)[0] == APPOINTMENT_EMPTY_FIELD)
|
||||
assert Inspector.check_locator(locator)[0] == APPOINTMENT_EMPTY_FIELD
|
||||
|
||||
# Wrong type (several types tested, it should do for anything that is not a string)
|
||||
locators = [[], -1, 3.2, 0, 4, (), object, {}, object()]
|
||||
|
||||
for locator in locators:
|
||||
assert (Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
|
||||
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_TYPE
|
||||
|
||||
# Wrong format (no hex)
|
||||
locators = NO_HEX_STRINGS
|
||||
for locator in locators:
|
||||
assert (Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_FORMAT)
|
||||
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_FORMAT
|
||||
|
||||
|
||||
def test_check_start_time():
|
||||
@@ -53,21 +53,21 @@ def test_check_start_time():
|
||||
|
||||
# Right format and right value (start time in the future)
|
||||
start_time = 101
|
||||
assert (Inspector.check_start_time(start_time, current_time) == APPOINTMENT_OK)
|
||||
assert Inspector.check_start_time(start_time, current_time) == APPOINTMENT_OK
|
||||
|
||||
# Start time too small (either same block or block in the past)
|
||||
start_times = [100, 99, 98, -1]
|
||||
for start_time in start_times:
|
||||
assert (Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL)
|
||||
assert Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL
|
||||
|
||||
# Empty field
|
||||
start_time = None
|
||||
assert (Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD)
|
||||
assert Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD
|
||||
|
||||
# Wrong data type
|
||||
start_times = WRONG_TYPES
|
||||
for start_time in start_times:
|
||||
assert (Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
|
||||
assert Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE
|
||||
|
||||
|
||||
def test_check_end_time():
|
||||
@@ -77,54 +77,54 @@ def test_check_end_time():
|
||||
|
||||
# Right format and right value (start time before end and end in the future)
|
||||
end_time = 121
|
||||
assert (Inspector.check_end_time(end_time, start_time, current_time) == APPOINTMENT_OK)
|
||||
assert Inspector.check_end_time(end_time, start_time, current_time) == APPOINTMENT_OK
|
||||
|
||||
# End time too small (start time after end time)
|
||||
end_times = [120, 119, 118, -1]
|
||||
for end_time in end_times:
|
||||
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL)
|
||||
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL
|
||||
|
||||
# End time too small (either same height as current block or in the past)
|
||||
current_time = 130
|
||||
end_times = [130, 129, 128, -1]
|
||||
for end_time in end_times:
|
||||
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL)
|
||||
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL
|
||||
|
||||
# Empty field
|
||||
end_time = None
|
||||
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD)
|
||||
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD
|
||||
|
||||
# Wrong data type
|
||||
end_times = WRONG_TYPES
|
||||
for end_time in end_times:
|
||||
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
|
||||
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE
|
||||
|
||||
|
||||
def test_check_delta():
|
||||
# Right value, right format
|
||||
deltas = [MIN_DISPUTE_DELTA, MIN_DISPUTE_DELTA+1, MIN_DISPUTE_DELTA+1000]
|
||||
deltas = [MIN_DISPUTE_DELTA, MIN_DISPUTE_DELTA + 1, MIN_DISPUTE_DELTA + 1000]
|
||||
for delta in deltas:
|
||||
assert (Inspector.check_delta(delta) == APPOINTMENT_OK)
|
||||
assert Inspector.check_delta(delta) == APPOINTMENT_OK
|
||||
|
||||
# Delta too small
|
||||
deltas = [MIN_DISPUTE_DELTA-1, MIN_DISPUTE_DELTA-2, 0, -1, -1000]
|
||||
deltas = [MIN_DISPUTE_DELTA - 1, MIN_DISPUTE_DELTA - 2, 0, -1, -1000]
|
||||
for delta in deltas:
|
||||
assert (Inspector.check_delta(delta)[0] == APPOINTMENT_FIELD_TOO_SMALL)
|
||||
assert Inspector.check_delta(delta)[0] == APPOINTMENT_FIELD_TOO_SMALL
|
||||
|
||||
# Empty field
|
||||
delta = None
|
||||
assert(Inspector.check_delta(delta)[0] == APPOINTMENT_EMPTY_FIELD)
|
||||
assert Inspector.check_delta(delta)[0] == APPOINTMENT_EMPTY_FIELD
|
||||
|
||||
# Wrong data type
|
||||
deltas = WRONG_TYPES
|
||||
for delta in deltas:
|
||||
assert (Inspector.check_delta(delta)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
|
||||
assert Inspector.check_delta(delta)[0] == APPOINTMENT_WRONG_FIELD_TYPE
|
||||
|
||||
|
||||
def test_check_blob():
|
||||
# Right format and length
|
||||
encrypted_blob = get_random_value_hex(120)
|
||||
assert(Inspector.check_blob(encrypted_blob) == APPOINTMENT_OK)
|
||||
assert Inspector.check_blob(encrypted_blob) == APPOINTMENT_OK
|
||||
|
||||
# # Wrong content
|
||||
# # FIXME: There is not proper defined format for this yet. It should be restricted by size at least, and check it
|
||||
@@ -133,16 +133,16 @@ def test_check_blob():
|
||||
# Wrong type
|
||||
encrypted_blobs = WRONG_TYPES_NO_STR
|
||||
for encrypted_blob in encrypted_blobs:
|
||||
assert (Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
|
||||
assert Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_TYPE
|
||||
|
||||
# Empty field
|
||||
encrypted_blob = None
|
||||
assert (Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_EMPTY_FIELD)
|
||||
assert Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_EMPTY_FIELD
|
||||
|
||||
# Wrong format (no hex)
|
||||
encrypted_blobs = NO_HEX_STRINGS
|
||||
for encrypted_blob in encrypted_blobs:
|
||||
assert (Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_FORMAT)
|
||||
assert Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_FORMAT
|
||||
|
||||
|
||||
def test_check_cipher():
|
||||
@@ -150,21 +150,21 @@ def test_check_cipher():
|
||||
for cipher in SUPPORTED_CIPHERS:
|
||||
cipher_cases = [cipher, cipher.lower(), cipher.capitalize()]
|
||||
for case in cipher_cases:
|
||||
assert(Inspector.check_cipher(case) == APPOINTMENT_OK)
|
||||
assert Inspector.check_cipher(case) == APPOINTMENT_OK
|
||||
|
||||
# Wrong type
|
||||
ciphers = WRONG_TYPES_NO_STR
|
||||
for cipher in ciphers:
|
||||
assert(Inspector.check_cipher(cipher)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
|
||||
assert Inspector.check_cipher(cipher)[0] == APPOINTMENT_WRONG_FIELD_TYPE
|
||||
|
||||
# Wrong value
|
||||
ciphers = NO_HEX_STRINGS
|
||||
for cipher in ciphers:
|
||||
assert(Inspector.check_cipher(cipher)[0] == APPOINTMENT_CIPHER_NOT_SUPPORTED)
|
||||
assert Inspector.check_cipher(cipher)[0] == APPOINTMENT_CIPHER_NOT_SUPPORTED
|
||||
|
||||
# Empty field
|
||||
cipher = None
|
||||
assert (Inspector.check_cipher(cipher)[0] == APPOINTMENT_EMPTY_FIELD)
|
||||
assert Inspector.check_cipher(cipher)[0] == APPOINTMENT_EMPTY_FIELD
|
||||
|
||||
|
||||
def test_check_hash_function():
|
||||
@@ -172,21 +172,21 @@ def test_check_hash_function():
|
||||
for hash_function in SUPPORTED_HASH_FUNCTIONS:
|
||||
hash_function_cases = [hash_function, hash_function.lower(), hash_function.capitalize()]
|
||||
for case in hash_function_cases:
|
||||
assert (Inspector.check_hash_function(case) == APPOINTMENT_OK)
|
||||
assert Inspector.check_hash_function(case) == APPOINTMENT_OK
|
||||
|
||||
# Wrong type
|
||||
hash_functions = WRONG_TYPES_NO_STR
|
||||
for hash_function in hash_functions:
|
||||
assert (Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
|
||||
assert Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_WRONG_FIELD_TYPE
|
||||
|
||||
# Wrong value
|
||||
hash_functions = NO_HEX_STRINGS
|
||||
for hash_function in hash_functions:
|
||||
assert (Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED)
|
||||
assert Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED
|
||||
|
||||
# Empty field
|
||||
hash_function = None
|
||||
assert (Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_EMPTY_FIELD)
|
||||
assert Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_EMPTY_FIELD
|
||||
|
||||
|
||||
def test_inspect(run_bitcoind):
|
||||
@@ -196,7 +196,7 @@ def test_inspect(run_bitcoind):
|
||||
# Invalid appointment, every field is empty
|
||||
appointment_data = dict()
|
||||
appointment = inspector.inspect(appointment_data)
|
||||
assert (type(appointment) == tuple and appointment[0] != 0)
|
||||
assert type(appointment) == tuple and appointment[0] != 0
|
||||
|
||||
# Valid appointment
|
||||
locator = get_random_value_hex(32)
|
||||
@@ -207,13 +207,25 @@ def test_inspect(run_bitcoind):
|
||||
cipher = SUPPORTED_CIPHERS[0]
|
||||
hash_function = SUPPORTED_HASH_FUNCTIONS[0]
|
||||
|
||||
appointment_data = {"locator": locator, "start_time": start_time, "end_time": end_time,
|
||||
"dispute_delta": dispute_delta, "encrypted_blob": encrypted_blob, "cipher": cipher,
|
||||
"hash_function": hash_function}
|
||||
appointment_data = {
|
||||
"locator": locator,
|
||||
"start_time": start_time,
|
||||
"end_time": end_time,
|
||||
"dispute_delta": dispute_delta,
|
||||
"encrypted_blob": encrypted_blob,
|
||||
"cipher": cipher,
|
||||
"hash_function": hash_function,
|
||||
}
|
||||
|
||||
appointment = inspector.inspect(appointment_data)
|
||||
|
||||
assert(type(appointment) == Appointment and appointment.locator == locator and appointment.start_time == start_time
|
||||
and appointment.end_time == end_time and appointment.dispute_delta == dispute_delta and
|
||||
appointment.encrypted_blob.data == encrypted_blob and appointment.cipher == cipher and
|
||||
appointment.hash_function == hash_function)
|
||||
assert (
|
||||
type(appointment) == Appointment
|
||||
and appointment.locator == locator
|
||||
and appointment.start_time == start_time
|
||||
and appointment.end_time == end_time
|
||||
and appointment.dispute_delta == dispute_delta
|
||||
and appointment.encrypted_blob.data == encrypted_blob
|
||||
and appointment.cipher == cipher
|
||||
and appointment.hash_function == hash_function
|
||||
)
|
||||
|
||||
@@ -31,12 +31,14 @@ def create_dummy_job_data(random_txid=False, justice_rawtx=None):
|
||||
justice_txid = "f4184fc596403b9d638783cf57adfe4c75c605f6356fbc91338530e9831e9e16"
|
||||
|
||||
if justice_rawtx is None:
|
||||
justice_rawtx = "0100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402" \
|
||||
"204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4" \
|
||||
"acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b" \
|
||||
"13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1ba" \
|
||||
"ded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482e" \
|
||||
"cad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000"
|
||||
justice_rawtx = (
|
||||
"0100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402"
|
||||
"204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4"
|
||||
"acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b"
|
||||
"13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1ba"
|
||||
"ded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482e"
|
||||
"cad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000"
|
||||
)
|
||||
|
||||
else:
|
||||
justice_txid = sha256d(justice_rawtx)
|
||||
@@ -58,24 +60,34 @@ def test_job_init(run_bitcoind):
|
||||
dispute_txid, justice_txid, justice_rawtx, appointment_end = create_dummy_job_data()
|
||||
job = Job(dispute_txid, justice_txid, justice_rawtx, appointment_end)
|
||||
|
||||
assert job.dispute_txid == dispute_txid and job.justice_txid == justice_txid \
|
||||
and job.justice_rawtx == justice_rawtx and job.appointment_end == appointment_end
|
||||
assert (
|
||||
job.dispute_txid == dispute_txid
|
||||
and job.justice_txid == justice_txid
|
||||
and job.justice_rawtx == justice_rawtx
|
||||
and job.appointment_end == appointment_end
|
||||
)
|
||||
|
||||
|
||||
def test_job_to_dict():
|
||||
job = create_dummy_job()
|
||||
job_dict = job.to_dict()
|
||||
|
||||
assert job.locator == job_dict["locator"] and job.justice_rawtx == job_dict["justice_rawtx"] \
|
||||
assert (
|
||||
job.locator == job_dict["locator"]
|
||||
and job.justice_rawtx == job_dict["justice_rawtx"]
|
||||
and job.appointment_end == job_dict["appointment_end"]
|
||||
)
|
||||
|
||||
|
||||
def test_job_to_json():
|
||||
job = create_dummy_job()
|
||||
job_dict = json.loads(job.to_json())
|
||||
|
||||
assert job.locator == job_dict["locator"] and job.justice_rawtx == job_dict["justice_rawtx"] \
|
||||
assert (
|
||||
job.locator == job_dict["locator"]
|
||||
and job.justice_rawtx == job_dict["justice_rawtx"]
|
||||
and job.appointment_end == job_dict["appointment_end"]
|
||||
)
|
||||
|
||||
|
||||
def test_init_responder(responder):
|
||||
@@ -97,8 +109,14 @@ def test_add_response(responder):
|
||||
responder.asleep = False
|
||||
|
||||
# The block_hash passed to add_response does not matter much now. It will in the future to deal with errors
|
||||
receipt = responder.add_response(uuid, job.dispute_txid, job.justice_txid, job.justice_rawtx, job.appointment_end,
|
||||
block_hash=get_random_value_hex(32))
|
||||
receipt = responder.add_response(
|
||||
uuid,
|
||||
job.dispute_txid,
|
||||
job.justice_txid,
|
||||
job.justice_rawtx,
|
||||
job.appointment_end,
|
||||
block_hash=get_random_value_hex(32),
|
||||
)
|
||||
|
||||
assert receipt.delivered is True
|
||||
|
||||
@@ -124,9 +142,13 @@ def test_create_job(responder):
|
||||
|
||||
# Check that the rest of job data also matches
|
||||
job = responder.jobs[uuid]
|
||||
assert job.dispute_txid == dispute_txid and job.justice_txid == justice_txid \
|
||||
and job.justice_rawtx == justice_rawtx and job.appointment_end == appointment_end \
|
||||
assert (
|
||||
job.dispute_txid == dispute_txid
|
||||
and job.justice_txid == justice_txid
|
||||
and job.justice_rawtx == justice_rawtx
|
||||
and job.appointment_end == appointment_end
|
||||
and job.appointment_end == appointment_end
|
||||
)
|
||||
|
||||
|
||||
def test_create_job_already_confirmed(responder):
|
||||
@@ -134,9 +156,10 @@ def test_create_job_already_confirmed(responder):
|
||||
|
||||
for i in range(20):
|
||||
uuid = uuid4().hex
|
||||
confirmations = i+1
|
||||
confirmations = i + 1
|
||||
dispute_txid, justice_txid, justice_rawtx, appointment_end = create_dummy_job_data(
|
||||
justice_rawtx=TX.create_dummy_transaction())
|
||||
justice_rawtx=TX.create_dummy_transaction()
|
||||
)
|
||||
|
||||
responder.create_job(uuid, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations)
|
||||
|
||||
@@ -218,7 +241,7 @@ def test_do_watch(responder):
|
||||
|
||||
def test_get_txs_to_rebroadcast(responder):
|
||||
# Let's create a few fake txids and assign at least 6 missing confirmations to each
|
||||
txs_missing_too_many_conf = {get_random_value_hex(32): 6+i for i in range(10)}
|
||||
txs_missing_too_many_conf = {get_random_value_hex(32): 6 + i for i in range(10)}
|
||||
|
||||
# Let's create some other transaction that has missed some confirmations but not that many
|
||||
txs_missing_some_conf = {get_random_value_hex(32): 3 for _ in range(10)}
|
||||
@@ -299,7 +322,8 @@ def test_rebroadcast(db_manager):
|
||||
for i in range(20):
|
||||
uuid = uuid4().hex
|
||||
dispute_txid, justice_txid, justice_rawtx, appointment_end = create_dummy_job_data(
|
||||
justice_rawtx=TX.create_dummy_transaction())
|
||||
justice_rawtx=TX.create_dummy_transaction()
|
||||
)
|
||||
|
||||
responder.jobs[uuid] = Job(dispute_txid, justice_txid, justice_rawtx, appointment_end)
|
||||
responder.tx_job_map[justice_txid] = [uuid]
|
||||
@@ -319,20 +343,3 @@ def test_rebroadcast(db_manager):
|
||||
|
||||
assert receipt.delivered is True
|
||||
assert responder.missed_confirmations[txid] == 0
@@ -6,9 +6,9 @@ c_logger.disabled = True
|
||||
|
||||
def test_in_correct_network(run_bitcoind):
|
||||
# The simulator runs as if it was regtest, so every other network should fail
|
||||
assert in_correct_network('mainnet') is False
|
||||
assert in_correct_network('testnet') is False
|
||||
assert in_correct_network('regtest') is True
|
||||
assert in_correct_network("mainnet") is False
|
||||
assert in_correct_network("testnet") is False
|
||||
assert in_correct_network("regtest") is True
|
||||
|
||||
|
||||
def test_can_connect_to_bitcoind():
|
||||
@@ -31,13 +31,13 @@ def test_bitcoin_cli():
|
||||
|
||||
|
||||
def test_check_txid_format():
|
||||
assert(check_txid_format(None) is False)
|
||||
assert(check_txid_format("") is False)
|
||||
assert(check_txid_format(0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef) is False) # wrong type
|
||||
assert(check_txid_format("abcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcd") is True) # lowercase
|
||||
assert(check_txid_format("ABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCD") is True) # uppercase
|
||||
assert(check_txid_format("0123456789abcdef0123456789ABCDEF0123456789abcdef0123456789ABCDEF") is True) # mixed case
|
||||
assert(check_txid_format("0123456789012345678901234567890123456789012345678901234567890123") is True) # only nums
|
||||
assert(check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdf") is False) # too short
|
||||
assert(check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0") is False) # too long
|
||||
assert(check_txid_format("g123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef") is False) # non-hex
|
||||
assert check_txid_format(None) is False
|
||||
assert check_txid_format("") is False
|
||||
assert check_txid_format(0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF) is False # wrong type
|
||||
assert check_txid_format("abcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcd") is True # lowercase
|
||||
assert check_txid_format("ABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCD") is True # uppercase
|
||||
assert check_txid_format("0123456789abcdef0123456789ABCDEF0123456789abcdef0123456789ABCDEF") is True # mixed case
|
||||
assert check_txid_format("0123456789012345678901234567890123456789012345678901234567890123") is True # only nums
|
||||
assert check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdf") is False # too short
|
||||
assert check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0") is False # too long
|
||||
assert check_txid_format("g123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef") is False # non-hex
|
||||
|
||||
@@ -15,8 +15,15 @@ from pisa.responder import Responder
|
||||
from pisa.tools import check_txid_format
|
||||
from pisa.utils.auth_proxy import AuthServiceProxy
|
||||
from test.unit.conftest import generate_block, generate_blocks, generate_dummy_appointment
|
||||
from pisa.conf import EXPIRY_DELTA, BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT, PISA_SECRET_KEY, \
|
||||
MAX_APPOINTMENTS
|
||||
from pisa.conf import (
|
||||
EXPIRY_DELTA,
|
||||
BTC_RPC_USER,
|
||||
BTC_RPC_PASSWD,
|
||||
BTC_RPC_HOST,
|
||||
BTC_RPC_PORT,
|
||||
PISA_SECRET_KEY,
|
||||
MAX_APPOINTMENTS,
|
||||
)
|
||||
|
||||
c_logger.disabled = True
|
||||
|
||||
@@ -42,8 +49,9 @@ def create_appointments(n):
|
||||
dispute_txs = []
|
||||
|
||||
for i in range(n):
|
||||
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
|
||||
end_time_offset=END_TIME_OFFSET)
|
||||
appointment, dispute_tx = generate_dummy_appointment(
|
||||
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
|
||||
)
|
||||
uuid = uuid4().hex
|
||||
|
||||
appointments[uuid] = appointment
|
||||
@@ -80,8 +88,9 @@ def test_add_appointment(run_bitcoind, watcher):
|
||||
|
||||
# We should be able to add appointments up to the limit
|
||||
for _ in range(10):
|
||||
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
|
||||
end_time_offset=END_TIME_OFFSET)
|
||||
appointment, dispute_tx = generate_dummy_appointment(
|
||||
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
|
||||
)
|
||||
added_appointment, sig = watcher.add_appointment(appointment)
|
||||
|
||||
assert added_appointment is True
|
||||
@@ -89,8 +98,7 @@ def test_add_appointment(run_bitcoind, watcher):
|
||||
|
||||
|
||||
def test_sign_appointment(watcher):
|
||||
appointment, _ = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
|
||||
end_time_offset=END_TIME_OFFSET)
|
||||
appointment, _ = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET)
|
||||
signature = watcher.sign_appointment(appointment)
|
||||
assert is_signature_valid(appointment, signature, public_key)
|
||||
|
||||
@@ -100,15 +108,17 @@ def test_add_too_many_appointments(watcher):
|
||||
watcher.appointments = dict()
|
||||
|
||||
for _ in range(MAX_APPOINTMENTS):
|
||||
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
|
||||
end_time_offset=END_TIME_OFFSET)
|
||||
appointment, dispute_tx = generate_dummy_appointment(
|
||||
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
|
||||
)
|
||||
added_appointment, sig = watcher.add_appointment(appointment)
|
||||
|
||||
assert added_appointment is True
|
||||
assert is_signature_valid(appointment, sig, public_key)
|
||||
|
||||
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
|
||||
end_time_offset=END_TIME_OFFSET)
|
||||
appointment, dispute_tx = generate_dummy_appointment(
|
||||
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
|
||||
)
|
||||
added_appointment, sig = watcher.add_appointment(appointment)
|
||||
|
||||
assert added_appointment is False
|
||||