From 1eb4423e5881cc142e77a1bad5691fb65afa5661 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 15:56:49 +0000 Subject: [PATCH 01/36] Updates gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 27ae6f5..0a4ebe4 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,5 @@ test.py .cache .pytest_cache/ *.pem +.coverage +htmlcov From 8f7505c17aeca6fdec25a87d0962ce29a8f2f834 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 16:34:25 +0000 Subject: [PATCH 02/36] Improves generate_dummy_appointment --- test/unit/conftest.py | 12 ++++++++---- test/unit/test_builder.py | 4 ++-- test/unit/test_db_manager.py | 2 +- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/test/unit/conftest.py b/test/unit/conftest.py index 4f27d60..b76a367 100644 --- a/test/unit/conftest.py +++ b/test/unit/conftest.py @@ -73,8 +73,12 @@ def generate_blocks(n): generate_block() -def generate_dummy_appointment_data(start_time_offset=5, end_time_offset=30): - current_height = bitcoin_cli().getblockcount() +def generate_dummy_appointment_data(real_height=True, start_time_offset=5, end_time_offset=30): + if real_height: + current_height = bitcoin_cli().getblockcount() + + else: + current_height = 10 dispute_tx = TX.create_dummy_transaction() dispute_txid = sha256d(dispute_tx) @@ -110,9 +114,9 @@ def generate_dummy_appointment_data(start_time_offset=5, end_time_offset=30): return appointment_data, dispute_tx -def generate_dummy_appointment(start_time_offset=5, end_time_offset=30): +def generate_dummy_appointment(real_height=True, start_time_offset=5, end_time_offset=30): appointment_data, dispute_tx = generate_dummy_appointment_data( - start_time_offset=start_time_offset, end_time_offset=end_time_offset + real_height=real_height, start_time_offset=start_time_offset, end_time_offset=end_time_offset ) return Appointment.from_dict(appointment_data), dispute_tx diff --git a/test/unit/test_builder.py b/test/unit/test_builder.py index f9966cc..548ab2e 100644 --- a/test/unit/test_builder.py +++ b/test/unit/test_builder.py @@ -9,7 +9,7 @@ def test_build_appointments(): # Create some appointment data for i in range(10): - appointment, _ = generate_dummy_appointment() + appointment, _ = generate_dummy_appointment(real_height=False) uuid = uuid4().hex appointments_data[uuid] = appointment.to_dict() @@ -17,7 +17,7 @@ def test_build_appointments(): # Add some additional appointments that share the same locator to test all the builder's cases if i % 2 == 0: locator = appointment.locator - appointment, _ = generate_dummy_appointment() + appointment, _ = generate_dummy_appointment(real_height=False) uuid = uuid4().hex appointment.locator = locator diff --git a/test/unit/test_db_manager.py b/test/unit/test_db_manager.py index fff398d..28b7ba3 100644 --- a/test/unit/test_db_manager.py +++ b/test/unit/test_db_manager.py @@ -11,7 +11,7 @@ from pisa.db_manager import WATCHER_LAST_BLOCK_KEY, RESPONDER_LAST_BLOCK_KEY, LO @pytest.fixture(scope="module") def watcher_appointments(): - return {uuid4().hex: generate_dummy_appointment()[0] for _ in range(10)} + return {uuid4().hex: generate_dummy_appointment(real_height=False)[0] for _ in range(10)} @pytest.fixture(scope="module") From 2b1640ea862786ecaf5066d8f76ba2cbea443d09 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 16:35:55 +0000 Subject: [PATCH 03/36] Simplifies Inspector using Appointment.from_json() constructor --- pisa/appointment.py | 3 ++- pisa/inspector.py 
| 24 ++++++++---------------- 2 files changed, 10 insertions(+), 17 deletions(-) diff --git a/pisa/appointment.py b/pisa/appointment.py index e81205a..61bd2e3 100644 --- a/pisa/appointment.py +++ b/pisa/appointment.py @@ -27,7 +27,8 @@ class Appointment: encrypted_blob_data = appointment_data.get("encrypted_blob") cipher = appointment_data.get("cipher") hash_function = appointment_data.get("hash_function") - triggered = appointment_data.get("triggered") + + triggered = True if appointment_data.get("triggered") is True else False if any( v is None diff --git a/pisa/inspector.py b/pisa/inspector.py index 68ac698..80ef214 100644 --- a/pisa/inspector.py +++ b/pisa/inspector.py @@ -16,34 +16,26 @@ logger = Logger("Inspector") class Inspector: def inspect(self, data): - locator = data.get("locator") - start_time = data.get("start_time") - end_time = data.get("end_time") - dispute_delta = data.get("dispute_delta") - encrypted_blob = data.get("encrypted_blob") - cipher = data.get("cipher") - hash_function = data.get("hash_function") - block_height = BlockProcessor.get_block_count() if block_height is not None: - rcode, message = self.check_locator(locator) + rcode, message = self.check_locator(data.get("locator")) if rcode == 0: - rcode, message = self.check_start_time(start_time, block_height) + rcode, message = self.check_start_time(data.get("start_time"), block_height) if rcode == 0: - rcode, message = self.check_end_time(end_time, start_time, block_height) + rcode, message = self.check_end_time(data.get("end_time"), data.get("start_time"), block_height) if rcode == 0: - rcode, message = self.check_delta(dispute_delta) + rcode, message = self.check_delta(data.get("dispute_delta")) if rcode == 0: - rcode, message = self.check_blob(encrypted_blob) + rcode, message = self.check_blob(data.get("encrypted_blob")) if rcode == 0: - rcode, message = self.check_cipher(cipher) + rcode, message = self.check_cipher(data.get("cipher")) if rcode == 0: - rcode, message = self.check_hash_function(hash_function) + rcode, message = self.check_hash_function(data.get("hash_function")) if rcode == 0: - r = Appointment(locator, start_time, end_time, dispute_delta, encrypted_blob, cipher, hash_function) + r = Appointment.from_dict(data) else: r = (rcode, message) From 82f9de1717d2705348eb99e686e9e668303f17bd Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 16:36:27 +0000 Subject: [PATCH 04/36] Adds missing Watcher test --- test/unit/test_watcher.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test/unit/test_watcher.py b/test/unit/test_watcher.py index 2c1e11f..ce693fb 100644 --- a/test/unit/test_watcher.py +++ b/test/unit/test_watcher.py @@ -81,6 +81,15 @@ def test_init(watcher): assert type(watcher.responder) is Responder +def test_init_no_key(db_manager): + try: + Watcher(db_manager, pisa_sk_file=None) + assert False + + except ValueError: + assert True + + def test_add_appointment(run_bitcoind, watcher): # The watcher automatically fires do_watch and do_subscribe on adding an appointment if it is asleep (initial state) # Avoid this by setting the state to awake. 
@@ -96,6 +105,12 @@ def test_add_appointment(run_bitcoind, watcher): assert added_appointment is True assert is_signature_valid(appointment, sig, public_key) + # Check that we can also add an already added appointment (same locator) + added_appointment, sig = watcher.add_appointment(appointment) + + assert added_appointment is True + assert is_signature_valid(appointment, sig, public_key) + def test_sign_appointment(watcher): appointment, _ = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET) From 8b2f0efa2fa17e91ecdf619dd0571645acd697b6 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 16:36:55 +0000 Subject: [PATCH 05/36] Moves appointment deletion + db update to the cleaner --- pisa/cleaner.py | 18 ++++++++++++++++++ pisa/watcher.py | 23 ++++++----------------- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/pisa/cleaner.py b/pisa/cleaner.py index 5ad772c..33af54a 100644 --- a/pisa/cleaner.py +++ b/pisa/cleaner.py @@ -25,6 +25,24 @@ class Cleaner: # Delete appointment from the db db_manager.delete_watcher_appointment(uuid) + @staticmethod + def delete_complete_appointment(appointments, locator_uuid_map, locator, uuid, db_manager): + # Delete the appointment + appointment = appointments.pop(uuid) + + # If there was only one appointment that matches the locator we can delete the whole list + if len(locator_uuid_map[locator]) == 1: + locator_uuid_map.pop(locator) + else: + # Otherwise we just delete the appointment that matches locator:appointment_pos + locator_uuid_map[locator].remove(uuid) + + # DISCUSS: instead of deleting the appointment, we will mark it as triggered and delete it from both + # the watcher's and responder's db after fulfilled + # Update appointment in the db + appointment.triggered = True + db_manager.store_watcher_appointment(uuid, appointment.to_json()) + @staticmethod def delete_completed_jobs(jobs, tx_job_map, completed_jobs, height, db_manager): for uuid, confirmations in completed_jobs: diff --git a/pisa/watcher.py b/pisa/watcher.py index 3db1ce7..03da907 100644 --- a/pisa/watcher.py +++ b/pisa/watcher.py @@ -18,7 +18,7 @@ logger = Logger("Watcher") class Watcher: - def __init__(self, db_manager, responder=None, max_appointments=MAX_APPOINTMENTS): + def __init__(self, db_manager, pisa_sk_file=PISA_SECRET_KEY, responder=None, max_appointments=MAX_APPOINTMENTS): self.appointments = dict() self.locator_uuid_map = dict() self.asleep = True @@ -30,7 +30,7 @@ class Watcher: if not isinstance(responder, Responder): self.responder = Responder(db_manager) - if PISA_SECRET_KEY is None: + if pisa_sk_file is None: raise ValueError("No signing key provided. 
Please fix your pisa.conf") else: with open(PISA_SECRET_KEY, "r") as key_file: @@ -137,21 +137,10 @@ class Watcher: block_hash, ) - # Delete the appointment - appointment = self.appointments.pop(uuid) - - # If there was only one appointment that matches the locator we can delete the whole list - if len(self.locator_uuid_map[locator]) == 1: - self.locator_uuid_map.pop(locator) - else: - # Otherwise we just delete the appointment that matches locator:appointment_pos - self.locator_uuid_map[locator].remove(uuid) - - # DISCUSS: instead of deleting the appointment, we will mark it as triggered and delete it from both - # the watcher's and responder's db after fulfilled - # Update appointment in the db - appointment.triggered = True - self.db_manager.store_watcher_appointment(uuid, appointment.to_json()) + # Delete the appointment and update db + Cleaner.delete_complete_appointment( + self.appointments, self.locator_uuid_map, locator, uuid, self.db_manager + ) # Register the last processed block for the watcher self.db_manager.store_last_block_hash_watcher(block_hash) From eb0417858eba81198c21bd6ba94d7a964989bccc Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 16:42:06 +0000 Subject: [PATCH 06/36] Adds missing test for EncryptedBlob --- test/unit/test_encrypted_blob.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/unit/test_encrypted_blob.py b/test/unit/test_encrypted_blob.py index 19098a8..3bc0d1d 100644 --- a/test/unit/test_encrypted_blob.py +++ b/test/unit/test_encrypted_blob.py @@ -11,6 +11,14 @@ def test_init_encrypted_blob(): assert EncryptedBlob(data).data == data +def test_equal(): + data = get_random_value_hex(64) + e_blob1 = EncryptedBlob(data) + e_blob2 = EncryptedBlob(data) + + assert e_blob1 == e_blob2 and id(e_blob1) != id(e_blob2) + + def test_decrypt(): # TODO: The decryption tests are assuming the cipher is AES-GCM-128, since EncryptedBlob assumes the same. Fix this. 
key = get_random_value_hex(32) From c384bc78c290c494421af3e79bf23244e6392b90 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 17:36:10 +0000 Subject: [PATCH 07/36] Changes code to use bitcoin_cli from pisa/tools --- test/unit/test_api.py | 8 +++----- test/unit/test_responder.py | 19 ++++++------------- test/unit/test_watcher.py | 17 +++-------------- 3 files changed, 12 insertions(+), 32 deletions(-) diff --git a/test/unit/test_api.py b/test/unit/test_api.py index 14c0c3f..8872811 100644 --- a/test/unit/test_api.py +++ b/test/unit/test_api.py @@ -3,9 +3,9 @@ import pytest import requests from pisa import HOST, PORT, c_logger -from pisa.utils.auth_proxy import AuthServiceProxy +from pisa.tools import bitcoin_cli from test.unit.conftest import generate_blocks, get_random_value_hex, generate_dummy_appointment_data -from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT, MAX_APPOINTMENTS +from pisa.conf import MAX_APPOINTMENTS c_logger.disabled = True @@ -117,12 +117,10 @@ def test_get_all_appointments_watcher(): def test_get_all_appointments_responder(): # Trigger all disputes - bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT)) - locators = [appointment["locator"] for appointment in appointments] for locator, dispute_tx in locator_dispute_tx_map.items(): if locator in locators: - bitcoin_cli.sendrawtransaction(dispute_tx) + bitcoin_cli().sendrawtransaction(dispute_tx) # Confirm transactions generate_blocks(6) diff --git a/test/unit/test_responder.py b/test/unit/test_responder.py index 7e82b89..592a7f6 100644 --- a/test/unit/test_responder.py +++ b/test/unit/test_responder.py @@ -5,13 +5,11 @@ from threading import Thread from queue import Queue, Empty from pisa import c_logger -from pisa.tools import check_txid_format +from pisa.tools import check_txid_format, bitcoin_cli from test.simulator.utils import sha256d from pisa.responder import Responder, Job from test.simulator.bitcoind_sim import TX -from pisa.utils.auth_proxy import AuthServiceProxy from test.unit.conftest import generate_block, generate_blocks, get_random_value_hex -from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT c_logger.disabled = True @@ -22,8 +20,6 @@ def responder(db_manager): def create_dummy_job_data(random_txid=False, justice_rawtx=None): - bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT)) - # The following transaction data corresponds to a valid transaction. For some test it may be interesting to have # some valid data, but for others we may need multiple different justice_txids. 
@@ -46,7 +42,7 @@ def create_dummy_job_data(random_txid=False, justice_rawtx=None): if random_txid is True: justice_txid = get_random_value_hex(32) - appointment_end = bitcoin_cli.getblockcount() + 2 + appointment_end = bitcoin_cli().getblockcount() + 2 return dispute_txid, justice_txid, justice_rawtx, appointment_end @@ -189,8 +185,6 @@ def test_do_watch(responder): responder.unconfirmed_txs = [] responder.missed_confirmations = dict() - bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT)) - jobs = [create_dummy_job(justice_rawtx=TX.create_dummy_transaction()) for _ in range(20)] # Let's set up the jobs first @@ -210,7 +204,7 @@ def test_do_watch(responder): # And broadcast some of the transactions broadcast_txs = [] for job in jobs[:5]: - bitcoin_cli.sendrawtransaction(job.justice_rawtx) + bitcoin_cli().sendrawtransaction(job.justice_rawtx) broadcast_txs.append(job.justice_txid) # Mine a block @@ -229,7 +223,7 @@ def test_do_watch(responder): # Do the rest broadcast_txs = [] for job in jobs[5:]: - bitcoin_cli.sendrawtransaction(job.justice_rawtx) + bitcoin_cli().sendrawtransaction(job.justice_rawtx) broadcast_txs.append(job.justice_txid) # Mine a block @@ -263,8 +257,7 @@ def test_get_txs_to_rebroadcast(responder): def test_get_completed_jobs(db_manager): - bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT)) - initial_height = bitcoin_cli.getblockcount() + initial_height = bitcoin_cli().getblockcount() # Let's use a fresh responder for this to make it easier to compare the results responder = Responder(db_manager) @@ -291,7 +284,7 @@ def test_get_completed_jobs(db_manager): responder.jobs.update(jobs_no_end) for uuid, job in responder.jobs.items(): - bitcoin_cli.sendrawtransaction(job.justice_rawtx) + bitcoin_cli().sendrawtransaction(job.justice_rawtx) # The dummy appointments have a end_appointment time of current + 2, but jobs need at least 6 confs by default generate_blocks(6) diff --git a/test/unit/test_watcher.py b/test/unit/test_watcher.py index ce693fb..abbf59e 100644 --- a/test/unit/test_watcher.py +++ b/test/unit/test_watcher.py @@ -12,18 +12,9 @@ from cryptography.exceptions import InvalidSignature from pisa import c_logger from pisa.watcher import Watcher from pisa.responder import Responder -from pisa.tools import check_txid_format -from pisa.utils.auth_proxy import AuthServiceProxy +from pisa.tools import check_txid_format, bitcoin_cli from test.unit.conftest import generate_block, generate_blocks, generate_dummy_appointment -from pisa.conf import ( - EXPIRY_DELTA, - BTC_RPC_USER, - BTC_RPC_PASSWD, - BTC_RPC_HOST, - BTC_RPC_PORT, - PISA_SECRET_KEY, - MAX_APPOINTMENTS, -) +from pisa.conf import EXPIRY_DELTA, PISA_SECRET_KEY, MAX_APPOINTMENTS c_logger.disabled = True @@ -157,8 +148,6 @@ def test_do_subscribe(watcher): def test_do_watch(watcher): - bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT)) - # We will wipe all the previous data and add 5 appointments watcher.appointments, watcher.locator_uuid_map, dispute_txs = create_appointments(APPOINTMENTS) @@ -168,7 +157,7 @@ def test_do_watch(watcher): # Broadcast the first two for dispute_tx in dispute_txs[:2]: - bitcoin_cli.sendrawtransaction(dispute_tx) + bitcoin_cli().sendrawtransaction(dispute_tx) # After leaving some time for the block to be mined and processed, the number of appointments should have reduced # by two From 
f4f77fb39a9aabed95d1d9c6282ae2118f5396bf Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 7 Nov 2019 18:16:14 +0000 Subject: [PATCH 08/36] Improves test appointment and add missing tests --- test/unit/test_appointment.py | 112 +++++++++++++++++++++++++--------- 1 file changed, 82 insertions(+), 30 deletions(-) diff --git a/test/unit/test_appointment.py b/test/unit/test_appointment.py index 7be6a60..e314d5e 100644 --- a/test/unit/test_appointment.py +++ b/test/unit/test_appointment.py @@ -20,7 +20,15 @@ def appointment_data(): cipher = "AES-GCM-128" hash_function = "SHA256" - return locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function + return { + "locator": locator, + "start_time": start_time, + "end_time": end_time, + "dispute_delta": dispute_delta, + "encrypted_blob": encrypted_blob_data, + "cipher": cipher, + "hash_function": hash_function, + } def test_init_appointment(appointment_data): @@ -28,51 +36,95 @@ def test_init_appointment(appointment_data): # creating appointments. # DISCUSS: whether this makes sense by design or checks should be ported from the inspector to the appointment # 35-appointment-checks - - locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function = appointment_data - - appointment = Appointment(locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function) + appointment = Appointment( + appointment_data["locator"], + appointment_data["start_time"], + appointment_data["end_time"], + appointment_data["dispute_delta"], + appointment_data["encrypted_blob"], + appointment_data["cipher"], + appointment_data["hash_function"], + ) assert ( - locator == appointment.locator - and start_time == appointment.start_time - and end_time == appointment.end_time - and EncryptedBlob(encrypted_blob_data) == appointment.encrypted_blob - and cipher == appointment.cipher - and dispute_delta == appointment.dispute_delta - and hash_function == appointment.hash_function + appointment_data["locator"] == appointment.locator + and appointment_data["start_time"] == appointment.start_time + and appointment_data["end_time"] == appointment.end_time + and appointment_data["dispute_delta"] == appointment.dispute_delta + and EncryptedBlob(appointment_data["encrypted_blob"]) == appointment.encrypted_blob + and appointment_data["cipher"] == appointment.cipher + and appointment_data["hash_function"] == appointment.hash_function ) def test_to_dict(appointment_data): - locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function = appointment_data - appointment = Appointment(locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function) + appointment = Appointment( + appointment_data["locator"], + appointment_data["start_time"], + appointment_data["end_time"], + appointment_data["dispute_delta"], + appointment_data["encrypted_blob"], + appointment_data["cipher"], + appointment_data["hash_function"], + ) dict_appointment = appointment.to_dict() assert ( - locator == dict_appointment.get("locator") - and start_time == dict_appointment.get("start_time") - and end_time == dict_appointment.get("end_time") - and dispute_delta == dict_appointment.get("dispute_delta") - and cipher == dict_appointment.get("cipher") - and hash_function == dict_appointment.get("hash_function") - and encrypted_blob_data == dict_appointment.get("encrypted_blob") + appointment_data["locator"] == dict_appointment["locator"] + and appointment_data["start_time"] == 
dict_appointment["start_time"] + and appointment_data["end_time"] == dict_appointment["end_time"] + and appointment_data["dispute_delta"] == dict_appointment["dispute_delta"] + and EncryptedBlob(appointment_data["encrypted_blob"]) == EncryptedBlob(dict_appointment["encrypted_blob"]) + and appointment_data["cipher"] == dict_appointment["cipher"] + and appointment_data["hash_function"] == dict_appointment["hash_function"] ) def test_to_json(appointment_data): - locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function = appointment_data - appointment = Appointment(locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function) + appointment = Appointment( + appointment_data["locator"], + appointment_data["start_time"], + appointment_data["end_time"], + appointment_data["dispute_delta"], + appointment_data["encrypted_blob"], + appointment_data["cipher"], + appointment_data["hash_function"], + ) dict_appointment = json.loads(appointment.to_json()) assert ( - locator == dict_appointment.get("locator") - and start_time == dict_appointment.get("start_time") - and end_time == dict_appointment.get("end_time") - and dispute_delta == dict_appointment.get("dispute_delta") - and cipher == dict_appointment.get("cipher") - and hash_function == dict_appointment.get("hash_function") - and encrypted_blob_data == dict_appointment.get("encrypted_blob") + appointment_data["locator"] == dict_appointment["locator"] + and appointment_data["start_time"] == dict_appointment["start_time"] + and appointment_data["end_time"] == dict_appointment["end_time"] + and appointment_data["dispute_delta"] == dict_appointment["dispute_delta"] + and EncryptedBlob(appointment_data["encrypted_blob"]) == EncryptedBlob(dict_appointment["encrypted_blob"]) + and appointment_data["cipher"] == dict_appointment["cipher"] + and appointment_data["hash_function"] == dict_appointment["hash_function"] ) + + +def test_from_dict(appointment_data): + # The appointment should be build if we don't miss any field + appointment = Appointment.from_dict(appointment_data) + assert isinstance(appointment, Appointment) + + # Otherwise it should fail + appointment_data["hash_function"] = None + + try: + Appointment.from_dict(appointment_data) + assert False + + except ValueError: + assert True + + +def test_serialize(appointment_data): + appointment = Appointment.from_dict(appointment_data) + + assert appointment.triggered is False + ser_appointment = appointment.serialize() + + assert ser_appointment == json.dumps(appointment_data, sort_keys=True, separators=(",", ":")).encode("utf-8") From c08013abd0c3972d3861745be7afc3d5ec02b03a Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 8 Nov 2019 14:06:06 +0000 Subject: [PATCH 09/36] Creates Cryptographer. 
Separates decryption from encrypted_blob

---
 pisa/cryptographer.py  | 50 ++++++++++++++++++++++++++++++++++
 pisa/encrypted_blob.py | 55 +++++++++++-------------------------------
 2 files changed, 64 insertions(+), 41 deletions(-)
 create mode 100644 pisa/cryptographer.py

diff --git a/pisa/cryptographer.py b/pisa/cryptographer.py
new file mode 100644
index 0000000..25d545c
--- /dev/null
+++ b/pisa/cryptographer.py
@@ -0,0 +1,50 @@
+from hashlib import sha256
+from binascii import unhexlify, hexlify
+from cryptography.exceptions import InvalidTag
+from cryptography.hazmat.primitives.ciphers.aead import AESGCM
+
+from pisa.logger import Logger
+
+logger = Logger("Cryptographer")
+
+
+# FIXME: Cryptographer is assuming AES-128-GCM and SHA256 since they are the only pair accepted by the encrypted blob
+# and the only pair programmed so far.
+class Cryptographer:
+    @staticmethod
+    # ToDo: #20-test-tx-decrypting-edge-cases
+    def decrypt(encrypted_blob, key, rtype="hex"):
+        if rtype not in ["hex", "bytes"]:
+            raise ValueError("Wrong return type. Return type must be 'hex' or 'bytes'")
+
+        # master_key = H(tx_id | tx_id)
+        key = unhexlify(key)
+        master_key = sha256(key + key).digest()
+
+        # The 16 MSB of the master key will serve as the AES GCM 128 secret key. The 16 LSB will serve as the IV.
+        sk = master_key[:16]
+        nonce = master_key[16:]
+
+        logger.info(
+            "Creating new blob.",
+            master_key=hexlify(master_key).decode(),
+            sk=hexlify(sk).decode(),
+            nonce=hexlify(sk).decode(),
+            encrypted_blob=encrypted_blob.data,
+        )
+
+        # Decrypt
+        cipher = AESGCM(sk)
+        data = unhexlify(encrypted_blob.data.encode())
+
+        try:
+            blob = cipher.decrypt(nonce=nonce, data=data, associated_data=None)
+
+            # Change the blob encoding to hex depending on the rtype (default)
+            if rtype == "hex":
+                blob = hexlify(blob).decode("utf8")
+
+        except InvalidTag:
+            blob = None
+
+        return blob
diff --git a/pisa/encrypted_blob.py b/pisa/encrypted_blob.py
index dd58a39..36f51f7 100644
--- a/pisa/encrypted_blob.py
+++ b/pisa/encrypted_blob.py
@@ -1,48 +1,21 @@
-from hashlib import sha256
-from binascii import unhexlify, hexlify
-from cryptography.exceptions import InvalidTag
-from cryptography.hazmat.primitives.ciphers.aead import AESGCM
-
-from pisa.logger import Logger
-
-logger = Logger("Watcher")
+from pisa.conf import SUPPORTED_CIPHERS, SUPPORTED_HASH_FUNCTIONS
 
 
-# FIXME: EncryptedBlob is assuming AES-128-GCM. A cipher field should be part of the object and the decryption should be
-# performed depending on the cipher.
 class EncryptedBlob:
-    def __init__(self, data):
+    def __init__(self, data, cipher="AES-GCM-128", hash_function="SHA256"):
+        if cipher in SUPPORTED_CIPHERS:
+            self.cipher = cipher
+
+        else:
+            raise ValueError("Cipher not supported")
+
+        if hash_function in SUPPORTED_HASH_FUNCTIONS:
+            self.hash_function = hash_function
+
+        else:
+            raise ValueError("Hash function not supported")
+
         self.data = data
 
     def __eq__(self, other):
         return isinstance(other, EncryptedBlob) and self.data == other.data
-
-    def decrypt(self, key):
-        # master_key = H(tx_id | tx_id)
-        key = unhexlify(key)
-        master_key = sha256(key + key).digest()
-
-        # The 16 MSB of the master key will serve as the AES GCM 128 secret key. The 16 LSB will serve as the IV.
- sk = master_key[:16] - nonce = master_key[16:] - - logger.info( - "Creating new blob.", - master_key=hexlify(master_key).decode(), - sk=hexlify(sk).decode(), - nonce=hexlify(sk).decode(), - encrypted_blob=self.data, - ) - - # Decrypt - aesgcm = AESGCM(sk) - data = unhexlify(self.data.encode()) - - try: - raw_tx = aesgcm.decrypt(nonce=nonce, data=data, associated_data=None) - hex_raw_tx = hexlify(raw_tx).decode("utf8") - - except InvalidTag: - hex_raw_tx = None - - return hex_raw_tx From 20faa04c4cbe6623d415c4af22629dfe147624a7 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 8 Nov 2019 14:16:02 +0000 Subject: [PATCH 10/36] Refactord Watcher and BlockProcessor. Closes #36. Brings back check_potential_matches and check_matches to the Watcher. The methods have now been renamed to check_matches (old check_potential_matches) and filter_valid_matches (old check_matches) to provide a better description of their purpose. decode_raw_transaction has been added to BlockProcessor --- pisa/block_processor.py | 58 ++++++--------------------- pisa/watcher.py | 86 ++++++++++++++++++++++++++++++++++------- 2 files changed, 83 insertions(+), 61 deletions(-) diff --git a/pisa/block_processor.py b/pisa/block_processor.py index ea3d87d..e3a2bc6 100644 --- a/pisa/block_processor.py +++ b/pisa/block_processor.py @@ -1,6 +1,3 @@ -import binascii -from hashlib import sha256 - from pisa.logger import Logger from pisa.tools import bitcoin_cli from pisa.utils.auth_proxy import JSONRPCException @@ -45,6 +42,18 @@ class BlockProcessor: return block_count + @staticmethod + def decode_raw_transaction(raw_tx): + + try: + tx = bitcoin_cli().decoderawtransaction(raw_tx) + + except JSONRPCException as e: + tx = None + logger.error("Can't build transaction from decoded data.", error=e.error) + + return tx + def get_missed_blocks(self, last_know_block_hash): current_block_hash = self.get_best_block_hash() missed_blocks = [] @@ -72,49 +81,6 @@ class BlockProcessor: return distance - # FIXME: The following two functions does not seem to belong here. They come from the Watcher, and need to be - # separated since they will be reused by the TimeTraveller. - # DISCUSS: 36-who-should-check-appointment-trigger - @staticmethod - def get_potential_matches(txids, locator_uuid_map): - potential_locators = {sha256(binascii.unhexlify(txid)).hexdigest(): txid for txid in txids} - - # Check is any of the tx_ids in the received block is an actual match - intersection = set(locator_uuid_map.keys()).intersection(potential_locators.keys()) - potential_matches = {locator: potential_locators[locator] for locator in intersection} - - if len(potential_matches) > 0: - logger.info("List of potential matches", potential_matches=potential_matches) - - else: - logger.info("No potential matches found") - - return potential_matches - - @staticmethod - # NOTCOVERED - def get_matches(potential_matches, locator_uuid_map, appointments): - matches = [] - - for locator, dispute_txid in potential_matches.items(): - for uuid in locator_uuid_map[locator]: - try: - # ToDo: #20-test-tx-decrypting-edge-cases - justice_rawtx = appointments[uuid].encrypted_blob.decrypt(dispute_txid) - justice_txid = bitcoin_cli().decoderawtransaction(justice_rawtx).get("txid") - logger.info("Match found for locator.", locator=locator, uuid=uuid, justice_txid=justice_txid) - - except JSONRPCException as e: - # Tx decode failed returns error code -22, maybe we should be more strict here. 
Leaving it simple - # for the POC - justice_txid = None - justice_rawtx = None - logger.error("Can't build transaction from decoded data.", error=e.error) - - matches.append((locator, uuid, dispute_txid, justice_txid, justice_rawtx)) - - return matches - # DISCUSS: This method comes from the Responder and seems like it could go back there. @staticmethod # NOTCOVERED diff --git a/pisa/watcher.py b/pisa/watcher.py index 03da907..f30acd1 100644 --- a/pisa/watcher.py +++ b/pisa/watcher.py @@ -1,18 +1,22 @@ from uuid import uuid4 from queue import Queue +from hashlib import sha256 from threading import Thread +from binascii import unhexlify -from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.serialization import load_pem_private_key +from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.serialization import load_pem_private_key from pisa.logger import Logger from pisa.cleaner import Cleaner -from pisa.conf import EXPIRY_DELTA, MAX_APPOINTMENTS, PISA_SECRET_KEY from pisa.responder import Responder +from pisa.appointment import Appointment +from pisa.cryptographer import Cryptographer from pisa.block_processor import BlockProcessor from pisa.utils.zmq_subscriber import ZMQHandler +from pisa.conf import EXPIRY_DELTA, MAX_APPOINTMENTS, PISA_SECRET_KEY logger = Logger("Watcher") @@ -115,35 +119,35 @@ class Watcher: expired_appointments, self.appointments, self.locator_uuid_map, self.db_manager ) - potential_matches = BlockProcessor.get_potential_matches(txids, self.locator_uuid_map) - matches = BlockProcessor.get_matches(potential_matches, self.locator_uuid_map, self.appointments) + matches = self.get_matches(txids, self.locator_uuid_map) + filtered_matches = self.filter_valid_matches(matches, self.locator_uuid_map, self.appointments) - for locator, uuid, dispute_txid, justice_txid, justice_rawtx in matches: + for uuid, filtered_match in filtered_matches.items(): # Errors decrypting the Blob will result in a None justice_txid - if justice_txid is not None: + if filtered_match["valid_match"] is True: logger.info( "Notifying responder and deleting appointment.", - justice_txid=justice_txid, - locator=locator, + justice_txid=filtered_match["justice_txid"], + locator=filtered_match["locator"], uuid=uuid, ) self.responder.add_response( uuid, - dispute_txid, - justice_txid, - justice_rawtx, + filtered_match["dispute_txid"], + filtered_match["justice_txid"], + filtered_match["justice_rawtx"], self.appointments[uuid].end_time, block_hash, ) # Delete the appointment and update db Cleaner.delete_complete_appointment( - self.appointments, self.locator_uuid_map, locator, uuid, self.db_manager + self.appointments, self.locator_uuid_map, filtered_match["locator"], uuid, self.db_manager ) - # Register the last processed block for the watcher - self.db_manager.store_last_block_hash_watcher(block_hash) + # Register the last processed block for the watcher + self.db_manager.store_last_block_hash_watcher(block_hash) # Go back to sleep if there are no more appointments self.asleep = True @@ -151,3 +155,55 @@ class Watcher: self.block_queue = Queue() logger.info("No more pending appointments, going back to sleep") + + @staticmethod + def compute_locator(tx_id): + return sha256(unhexlify(tx_id)).hexdigest() + + # DISCUSS: 36-who-should-check-appointment-trigger + @staticmethod + def get_matches(txids, locator_uuid_map): + 
potential_locators = {Watcher.compute_locator(txid): txid for txid in txids} + + # Check is any of the tx_ids in the received block is an actual match + intersection = set(locator_uuid_map.keys()).intersection(potential_locators.keys()) + matches = {locator: potential_locators[locator] for locator in intersection} + + if len(matches) > 0: + logger.info("List of matches", potential_matches=matches) + + else: + logger.info("No matches found") + + return matches + + @staticmethod + # NOTCOVERED + def filter_valid_matches(matches, locator_uuid_map, appointments): + filtered_matches = {} + + for locator, dispute_txid in matches.items(): + for uuid in locator_uuid_map[locator]: + + justice_rawtx = Cryptographer.decrypt(appointments[uuid].encrypted_blob, dispute_txid) + justice_tx = BlockProcessor.decode_raw_transaction(justice_rawtx) + + if justice_rawtx is not None: + justice_txid = justice_tx.get("txid") + valid_match = True + + logger.info("Match found for locator.", locator=locator, uuid=uuid, justice_txid=justice_txid) + + else: + justice_txid = None + valid_match = False + + filtered_matches[uuid] = { + "locator": locator, + "dispute_txid": dispute_txid, + "justice_txid": justice_txid, + "justice_rawtx": justice_rawtx, + "valid_match": valid_match, + } + + return filtered_matches From 8a10979db09b854af41e2e6f1dc99fc498ed7707 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 8 Nov 2019 14:23:06 +0000 Subject: [PATCH 11/36] Refactors Responder and BlockProcessor Brings check_confirmations back to the Responder --- pisa/block_processor.py | 22 ---------------------- pisa/responder.py | 24 +++++++++++++++++++++--- 2 files changed, 21 insertions(+), 25 deletions(-) diff --git a/pisa/block_processor.py b/pisa/block_processor.py index e3a2bc6..c44ead1 100644 --- a/pisa/block_processor.py +++ b/pisa/block_processor.py @@ -80,25 +80,3 @@ class BlockProcessor: distance = chain_tip_height - target_block_height return distance - - # DISCUSS: This method comes from the Responder and seems like it could go back there. 
-    @staticmethod
-    # NOTCOVERED
-    def check_confirmations(txs, unconfirmed_txs, tx_job_map, missed_confirmations):
-
-        for tx in txs:
-            if tx in tx_job_map and tx in unconfirmed_txs:
-                unconfirmed_txs.remove(tx)
-
-                logger.info("Confirmation received for transaction", tx=tx)
-
-            elif tx in unconfirmed_txs:
-                if tx in missed_confirmations:
-                    missed_confirmations[tx] += 1
-
-                else:
-                    missed_confirmations[tx] = 1
-
-                logger.info("Transaction missed a confirmation", tx=tx, missed_confirmations=missed_confirmations[tx])
-
-        return unconfirmed_txs, missed_confirmations
diff --git a/pisa/responder.py b/pisa/responder.py
index 4452a48..ecd516b 100644
--- a/pisa/responder.py
+++ b/pisa/responder.py
@@ -153,9 +153,7 @@ class Responder:
 
             # ToDo: #9-add-data-persistence
             if prev_block_hash == block.get("previousblockhash"):
-                self.unconfirmed_txs, self.missed_confirmations = BlockProcessor.check_confirmations(
-                    txs, self.unconfirmed_txs, self.tx_job_map, self.missed_confirmations
-                )
+                self.check_confirmations(txs)
 
                 txs_to_rebroadcast = self.get_txs_to_rebroadcast(txs)
                 completed_jobs = self.get_completed_jobs(height)
@@ -186,6 +184,26 @@ class Responder:
 
         logger.info("No more pending jobs, going back to sleep")
 
+    # NOTCOVERED
+    def check_confirmations(self, txs):
+
+        for tx in txs:
+            if tx in self.tx_job_map and tx in self.unconfirmed_txs:
+                self.unconfirmed_txs.remove(tx)
+
+                logger.info("Confirmation received for transaction", tx=tx)
+
+            elif tx in self.unconfirmed_txs:
+                if tx in self.missed_confirmations:
+                    self.missed_confirmations[tx] += 1
+
+                else:
+                    self.missed_confirmations[tx] = 1
+
+                logger.info(
+                    "Transaction missed a confirmation", tx=tx, missed_confirmations=self.missed_confirmations[tx]
+                )
+
     def get_txs_to_rebroadcast(self, txs):
         txs_to_rebroadcast = []

From da61ea9b8ef5c926fb5c4e6d68f91f5aad7e6510 Mon Sep 17 00:00:00 2001
From: Sergi Delgado Segura
Date: Fri, 8 Nov 2019 18:20:05 +0000
Subject: [PATCH 12/36] Makes sure that blobs to be decrypted are even-length

---
 pisa/cryptographer.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/pisa/cryptographer.py b/pisa/cryptographer.py
index 25d545c..efe493c 100644
--- a/pisa/cryptographer.py
+++ b/pisa/cryptographer.py
@@ -17,6 +17,12 @@ class Cryptographer:
         if rtype not in ["hex", "bytes"]:
             raise ValueError("Wrong return type. 
Return type must be 'hex' or 'bytes'") + if len(encrypted_blob.data) % 2: + logger.info( + "Incorrect (Odd-length) value to be decrypted.", encrypted_blob=encrypted_blob.data, dispute_txid=key + ) + return None + # master_key = H(tx_id | tx_id) key = unhexlify(key) master_key = sha256(key + key).digest() From e65b02c4732cb2a0948bc71c3430f3d186a5b017 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 8 Nov 2019 18:20:25 +0000 Subject: [PATCH 13/36] Adds Cryptographer unit tests --- test/unit/test_cryptographer.py | 51 +++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 test/unit/test_cryptographer.py diff --git a/test/unit/test_cryptographer.py b/test/unit/test_cryptographer.py new file mode 100644 index 0000000..09b862a --- /dev/null +++ b/test/unit/test_cryptographer.py @@ -0,0 +1,51 @@ +import binascii + +from pisa.cryptographer import Cryptographer +from pisa.encrypted_blob import EncryptedBlob +from test.unit.conftest import get_random_value_hex + +data = "6097cdf52309b1b2124efeed36bd34f46dc1c25ad23ac86f28380f746254f777" +key = "b2e984a570f6f49bc38ace178e09147b0aa296cbb7c92eb01412f7e2d07b5659" +encrypted_data = "092e93d4a34aac4367075506f2c050ddfa1a201ee6669b65058572904dcea642aeb01ea4b57293618e8c46809dfadadc" +encrypted_blob = EncryptedBlob(encrypted_data) + + +# TODO: The decryption tests are assuming the cipher is AES-GCM-128, since EncryptedBlob assumes the same. Fix this. +def test_decrypt_wrong_data(): + random_key = get_random_value_hex(32) + random_encrypted_data = get_random_value_hex(64) + random_encrypted_blob = EncryptedBlob(random_encrypted_data) + + # Trying to decrypt random data (in AES_GCM-128) should result in an InvalidTag exception. Our decrypt function + # returns None + hex_tx = Cryptographer.decrypt(random_encrypted_blob, random_key) + assert hex_tx is None + + +def test_decrypt_odd_length(): + random_key = get_random_value_hex(32) + random_encrypted_data_odd = get_random_value_hex(64)[:-1] + random_encrypted_blob_odd = EncryptedBlob(random_encrypted_data_odd) + + assert Cryptographer.decrypt(random_encrypted_blob_odd, random_key) is None + + +def test_decrypt_hex(): + # Valid data should run with no InvalidTag and verify + assert Cryptographer.decrypt(encrypted_blob, key) == data + + +def test_decrypt_bytes(): + # We can also get the decryption in bytes + byte_blob = Cryptographer.decrypt(encrypted_blob, key, rtype="bytes") + assert isinstance(byte_blob, bytes) and byte_blob == binascii.unhexlify(data) + + +def test_decrypt_wrong_return(): + # Any other type but "hex" (default) or "bytes" should fail + try: + Cryptographer.decrypt(encrypted_blob, key, rtype="random_value") + assert False + + except ValueError: + assert True From 4233c9d5cc346f557913395c1c7c6edeba0a3cc9 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 8 Nov 2019 18:21:42 +0000 Subject: [PATCH 14/36] Refactor get_matches and filter_valid_matches to not be static --- pisa/watcher.py | 28 +++++++++++----------------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/pisa/watcher.py b/pisa/watcher.py index f30acd1..60d6ee1 100644 --- a/pisa/watcher.py +++ b/pisa/watcher.py @@ -12,7 +12,6 @@ from cryptography.hazmat.primitives.serialization import load_pem_private_key from pisa.logger import Logger from pisa.cleaner import Cleaner from pisa.responder import Responder -from pisa.appointment import Appointment from pisa.cryptographer import Cryptographer from pisa.block_processor import BlockProcessor from pisa.utils.zmq_subscriber 
import ZMQHandler @@ -41,6 +40,10 @@ class Watcher: secret_key_pem = key_file.read().encode("utf-8") self.signing_key = load_pem_private_key(secret_key_pem, password=None, backend=default_backend()) + @staticmethod + def compute_locator(tx_id): + return sha256(unhexlify(tx_id)).hexdigest() + def sign_appointment(self, appointment): data = appointment.serialize() return self.signing_key.sign(data, ec.ECDSA(hashes.SHA256())) @@ -119,8 +122,7 @@ class Watcher: expired_appointments, self.appointments, self.locator_uuid_map, self.db_manager ) - matches = self.get_matches(txids, self.locator_uuid_map) - filtered_matches = self.filter_valid_matches(matches, self.locator_uuid_map, self.appointments) + filtered_matches = self.filter_valid_matches(self.get_matches(txids)) for uuid, filtered_match in filtered_matches.items(): # Errors decrypting the Blob will result in a None justice_txid @@ -156,17 +158,11 @@ class Watcher: logger.info("No more pending appointments, going back to sleep") - @staticmethod - def compute_locator(tx_id): - return sha256(unhexlify(tx_id)).hexdigest() - - # DISCUSS: 36-who-should-check-appointment-trigger - @staticmethod - def get_matches(txids, locator_uuid_map): + def get_matches(self, txids): potential_locators = {Watcher.compute_locator(txid): txid for txid in txids} # Check is any of the tx_ids in the received block is an actual match - intersection = set(locator_uuid_map.keys()).intersection(potential_locators.keys()) + intersection = set(self.locator_uuid_map.keys()).intersection(potential_locators.keys()) matches = {locator: potential_locators[locator] for locator in intersection} if len(matches) > 0: @@ -177,18 +173,16 @@ class Watcher: return matches - @staticmethod - # NOTCOVERED - def filter_valid_matches(matches, locator_uuid_map, appointments): + def filter_valid_matches(self, matches): filtered_matches = {} for locator, dispute_txid in matches.items(): - for uuid in locator_uuid_map[locator]: + for uuid in self.locator_uuid_map[locator]: - justice_rawtx = Cryptographer.decrypt(appointments[uuid].encrypted_blob, dispute_txid) + justice_rawtx = Cryptographer.decrypt(self.appointments[uuid].encrypted_blob, dispute_txid) justice_tx = BlockProcessor.decode_raw_transaction(justice_rawtx) - if justice_rawtx is not None: + if justice_tx is not None: justice_txid = justice_tx.get("txid") valid_match = True From 61a36048de8763718bb41cb2e44bfb08e5f01654 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 8 Nov 2019 18:23:24 +0000 Subject: [PATCH 15/36] Removes redundant tests --- test/unit/test_block_processor.py | 42 ------------------------------- test/unit/test_encrypted_blob.py | 20 --------------- 2 files changed, 62 deletions(-) diff --git a/test/unit/test_block_processor.py b/test/unit/test_block_processor.py index bb3b841..ab16736 100644 --- a/test/unit/test_block_processor.py +++ b/test/unit/test_block_processor.py @@ -1,7 +1,4 @@ import pytest -from uuid import uuid4 -from hashlib import sha256 -from binascii import unhexlify from pisa import c_logger from pisa.block_processor import BlockProcessor @@ -9,19 +6,6 @@ from test.unit.conftest import get_random_value_hex c_logger.disabled = True -APPOINTMENT_COUNT = 100 -TEST_SET_SIZE = 200 - - -@pytest.fixture(scope="module") -def txids(): - return [get_random_value_hex(32) for _ in range(APPOINTMENT_COUNT)] - - -@pytest.fixture(scope="module") -def locator_uuid_map(txids): - return {sha256(unhexlify(txid)).hexdigest(): uuid4().hex for txid in txids} - @pytest.fixture def best_block_hash(): @@ -52,29 
+36,3 @@ def test_get_random_block(): def test_get_block_count(): block_count = BlockProcessor.get_block_count() assert isinstance(block_count, int) and block_count >= 0 - - -def test_potential_matches(txids, locator_uuid_map): - potential_matches = BlockProcessor.get_potential_matches(txids, locator_uuid_map) - - # All the txids must match - assert locator_uuid_map.keys() == potential_matches.keys() - - -def test_potential_matches_random(locator_uuid_map): - txids = [get_random_value_hex(32) for _ in range(len(locator_uuid_map))] - - potential_matches = BlockProcessor.get_potential_matches(txids, locator_uuid_map) - - # None of the ids should match - assert len(potential_matches) == 0 - - -def test_potential_matches_random_data(locator_uuid_map): - # The likelihood of finding a potential match with random data should be negligible - txids = [get_random_value_hex(32) for _ in range(TEST_SET_SIZE)] - - potential_matches = BlockProcessor.get_potential_matches(txids, locator_uuid_map) - - # None of the txids should match - assert len(potential_matches) == 0 diff --git a/test/unit/test_encrypted_blob.py b/test/unit/test_encrypted_blob.py index 3bc0d1d..33b4ece 100644 --- a/test/unit/test_encrypted_blob.py +++ b/test/unit/test_encrypted_blob.py @@ -17,23 +17,3 @@ def test_equal(): e_blob2 = EncryptedBlob(data) assert e_blob1 == e_blob2 and id(e_blob1) != id(e_blob2) - - -def test_decrypt(): - # TODO: The decryption tests are assuming the cipher is AES-GCM-128, since EncryptedBlob assumes the same. Fix this. - key = get_random_value_hex(32) - encrypted_data = get_random_value_hex(64) - encrypted_blob = EncryptedBlob(encrypted_data) - - # Trying to decrypt random data (in AES_GCM-128) should result in an InvalidTag exception. Our decrypt function - # returns None - hex_tx = encrypted_blob.decrypt(key) - assert hex_tx is None - - # Valid data should run with no InvalidTag and verify - data = "6097cdf52309b1b2124efeed36bd34f46dc1c25ad23ac86f28380f746254f777" - key = "b2e984a570f6f49bc38ace178e09147b0aa296cbb7c92eb01412f7e2d07b5659" - encrypted_data = "092e93d4a34aac4367075506f2c050ddfa1a201ee6669b65058572904dcea642aeb01ea4b57293618e8c46809dfadadc" - encrypted_blob = EncryptedBlob(encrypted_data) - - assert encrypted_blob.decrypt(key) == data From a1a1206e0c4ecf94dee0d5ecf71fe480b2d2c808 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 8 Nov 2019 18:25:14 +0000 Subject: [PATCH 16/36] Adds new Watcher unit tests --- test/unit/test_watcher.py | 87 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 1 deletion(-) diff --git a/test/unit/test_watcher.py b/test/unit/test_watcher.py index abbf59e..bf5d7dd 100644 --- a/test/unit/test_watcher.py +++ b/test/unit/test_watcher.py @@ -1,6 +1,8 @@ import pytest from uuid import uuid4 +from hashlib import sha256 from threading import Thread +from binascii import unhexlify from queue import Queue, Empty from cryptography.hazmat.backends import default_backend @@ -13,7 +15,7 @@ from pisa import c_logger from pisa.watcher import Watcher from pisa.responder import Responder from pisa.tools import check_txid_format, bitcoin_cli -from test.unit.conftest import generate_block, generate_blocks, generate_dummy_appointment +from test.unit.conftest import generate_block, generate_blocks, generate_dummy_appointment, get_random_value_hex from pisa.conf import EXPIRY_DELTA, PISA_SECRET_KEY, MAX_APPOINTMENTS c_logger.disabled = True @@ -21,6 +23,7 @@ c_logger.disabled = True APPOINTMENTS = 5 START_TIME_OFFSET = 1 END_TIME_OFFSET = 1 
+TEST_SET_SIZE = 200 with open(PISA_SECRET_KEY, "r") as key_file: pubkey_pem = key_file.read().encode("utf-8") @@ -34,6 +37,16 @@ def watcher(db_manager): return Watcher(db_manager) +@pytest.fixture(scope="module") +def txids(): + return [get_random_value_hex(32) for _ in range(100)] + + +@pytest.fixture(scope="module") +def locator_uuid_map(txids): + return {sha256(unhexlify(txid)).hexdigest(): uuid4().hex for txid in txids} + + def create_appointments(n): locator_uuid_map = dict() appointments = dict() @@ -171,3 +184,75 @@ def test_do_watch(watcher): assert len(watcher.appointments) == 0 assert watcher.asleep is True + + +def test_matches(watcher, txids, locator_uuid_map): + watcher.locator_uuid_map = locator_uuid_map + potential_matches = watcher.get_matches(txids) + + # All the txids must match + assert locator_uuid_map.keys() == potential_matches.keys() + + +def test_matches_random_data(watcher, locator_uuid_map): + # The likelihood of finding a potential match with random data should be negligible + watcher.locator_uuid_map = locator_uuid_map + txids = [get_random_value_hex(32) for _ in range(TEST_SET_SIZE)] + + potential_matches = watcher.get_matches(txids) + + # None of the txids should match + assert len(potential_matches) == 0 + + +def test_filter_valid_matches_random_data(watcher): + appointments = {} + locator_uuid_map = {} + matches = {} + + for i in range(TEST_SET_SIZE): + dummy_appointment, _ = generate_dummy_appointment() + uuid = uuid4().hex + appointments[uuid] = dummy_appointment + + locator_uuid_map[dummy_appointment.locator] = [uuid] + + if i % 2: + dispute_txid = get_random_value_hex(32) + matches[dummy_appointment.locator] = dispute_txid + + watcher.locator_uuid_map = locator_uuid_map + watcher.appointments = appointments + + filtered_valid_matches = watcher.filter_valid_matches(matches) + + assert not any([fil_match["valid_match"] for uuid, fil_match in filtered_valid_matches.items()]) + + +def test_filter_valid_matches(watcher): + dispute_txid = "0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9" + encrypted_blob = ( + "29f55518945408f567bb7feb4d7bb15ba88b7d8ca0223a44d5c67dfe32d038caee7613e35736025d95ad4ecd6538a50" + "74cbe8d7739705697a5dc4d19b8a6e4459ed2d1b0d0a9b18c49bc2187dcbfb4046b14d58a1add83235fc632efc398d5" + "0abcb7738f1a04b3783d025c1828b4e8a8dc8f13f2843e6bc3bf08eade02fc7e2c4dce7d2f83b055652e944ac114e0b" + "72a9abcd98fd1d785a5d976c05ed780e033e125fa083c6591b6029aa68dbc099f148a2bc2e0cb63733e68af717d48d5" + "a312b5f5b2fcca9561b2ff4191f9cdff936a43f6efef4ee45fbaf1f18d0a4b006f3fc8399dd8ecb21f709d4583bba14" + "4af6d49fa99d7be2ca21059a997475aa8642b66b921dc7fc0321b6a2f6927f6f9bab55c75e17a19dc3b2ae895b6d4a4" + "f64f8eb21b1e" + ) + + dummy_appointment, _ = generate_dummy_appointment() + dummy_appointment.encrypted_blob.data = encrypted_blob + dummy_appointment.locator = sha256(unhexlify(dispute_txid)).hexdigest() + uuid = uuid4().hex + + appointments = {uuid: dummy_appointment} + locator_uuid_map = {dummy_appointment.locator: [uuid]} + matches = {dummy_appointment.locator: dispute_txid} + + watcher.appointments = appointments + watcher.locator_uuid_map = locator_uuid_map + + filtered_valid_matches = watcher.filter_valid_matches(matches) + + assert all([fil_match["valid_match"] for uuid, fil_match in filtered_valid_matches.items()]) From 3c6f13ef0047bce851e99d45f81e25032ae1d792 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 11 Nov 2019 13:27:57 +0000 Subject: [PATCH 17/36] Rename delete_completed_appointments and rearange arguments --- 
pisa/cleaner.py | 2 +- pisa/watcher.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pisa/cleaner.py b/pisa/cleaner.py index 33af54a..88177bc 100644 --- a/pisa/cleaner.py +++ b/pisa/cleaner.py @@ -26,7 +26,7 @@ class Cleaner: db_manager.delete_watcher_appointment(uuid) @staticmethod - def delete_complete_appointment(appointments, locator_uuid_map, locator, uuid, db_manager): + def delete_completed_appointment(locator, uuid, appointments, locator_uuid_map, db_manager): # Delete the appointment appointment = appointments.pop(uuid) diff --git a/pisa/watcher.py b/pisa/watcher.py index 60d6ee1..d4c6cc2 100644 --- a/pisa/watcher.py +++ b/pisa/watcher.py @@ -144,8 +144,8 @@ class Watcher: ) # Delete the appointment and update db - Cleaner.delete_complete_appointment( - self.appointments, self.locator_uuid_map, filtered_match["locator"], uuid, self.db_manager + Cleaner.delete_completed_appointment( + filtered_match["locator"], uuid, self.appointments, self.locator_uuid_map, self.db_manager ) # Register the last processed block for the watcher From fb7dfd4df840cb74eded63c8c0f7d33ae8f672b7 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 11 Nov 2019 13:28:13 +0000 Subject: [PATCH 18/36] Add missing Cleaner unit tests --- test/unit/test_cleaner.py | 102 +++++++++++++++++++++++++++++++------- 1 file changed, 85 insertions(+), 17 deletions(-) diff --git a/test/unit/test_cleaner.py b/test/unit/test_cleaner.py index 3b845bc..6b22cd4 100644 --- a/test/unit/test_cleaner.py +++ b/test/unit/test_cleaner.py @@ -5,63 +5,80 @@ from pisa import c_logger from pisa.responder import Job from pisa.cleaner import Cleaner from pisa.appointment import Appointment +from pisa.db_manager import WATCHER_PREFIX from test.unit.conftest import get_random_value_hex CONFIRMATIONS = 6 ITEMS = 10 MAX_ITEMS = 100 -ITERATIONS = 1000 +ITERATIONS = 10 c_logger.disabled = True -def set_up_appointments(total_appointments): +# WIP: FIX CLEANER TESTS AFTER ADDING delete_complete_appointment +def set_up_appointments(db_manager, total_appointments): appointments = dict() locator_uuid_map = dict() - for _ in range(total_appointments): + for i in range(total_appointments): uuid = uuid4().hex locator = get_random_value_hex(32) - appointments[uuid] = Appointment(locator, None, None, None, None, None, None) + appointment = Appointment(locator, None, None, None, None, None, None) + appointments[uuid] = appointment locator_uuid_map[locator] = [uuid] - # Each locator can have more than one uuid assigned to it. Do a coin toss to add multiple ones - while random.randint(0, 1): + db_manager.store_watcher_appointment(uuid, appointment.to_json()) + db_manager.store_update_locator_map(locator, uuid) + + # Each locator can have more than one uuid assigned to it. 
+ if i % 2: uuid = uuid4().hex - appointments[uuid] = Appointment(locator, None, None, None, None, None, None) + appointments[uuid] = appointment locator_uuid_map[locator].append(uuid) + db_manager.store_watcher_appointment(uuid, appointment.to_json()) + db_manager.store_update_locator_map(locator, uuid) + return appointments, locator_uuid_map -def set_up_jobs(total_jobs): +def set_up_jobs(db_manager, total_jobs): jobs = dict() tx_job_map = dict() - for _ in range(total_jobs): + for i in range(total_jobs): uuid = uuid4().hex + + # We use the same txid for justice and dispute here, it shouldn't matter txid = get_random_value_hex(32) # Assign both justice_txid and dispute_txid the same id (it shouldn't matter) - jobs[uuid] = Job(txid, txid, None, None) + job = Job(txid, txid, None, None) + jobs[uuid] = job tx_job_map[txid] = [uuid] - # Each justice_txid can have more than one uuid assigned to it. Do a coin toss to add multiple ones - while random.randint(0, 1): + db_manager.store_responder_job(uuid, job.to_json()) + db_manager.store_update_locator_map(job.locator, uuid) + + # Each justice_txid can have more than one uuid assigned to it. + if i % 2: uuid = uuid4().hex - jobs[uuid] = Job(txid, txid, None, None) + jobs[uuid] = job tx_job_map[txid].append(uuid) + db_manager.store_responder_job(uuid, job.to_json()) + db_manager.store_update_locator_map(job.locator, uuid) + return jobs, tx_job_map def test_delete_expired_appointment(db_manager): - for _ in range(ITERATIONS): - appointments, locator_uuid_map = set_up_appointments(MAX_ITEMS) + appointments, locator_uuid_map = set_up_appointments(db_manager, MAX_ITEMS) expired_appointments = random.sample(list(appointments.keys()), k=ITEMS) Cleaner.delete_expired_appointment(expired_appointments, appointments, locator_uuid_map, db_manager) @@ -69,11 +86,29 @@ def test_delete_expired_appointment(db_manager): assert not set(expired_appointments).issubset(appointments.keys()) -def test_delete_completed_jobs(db_manager): +def test_delete_completed_appointments(db_manager): + appointments, locator_uuid_map = set_up_appointments(db_manager, MAX_ITEMS) + uuids = list(appointments.keys()) + + for uuid in uuids: + Cleaner.delete_completed_appointment( + appointments[uuid].locator, uuid, appointments, locator_uuid_map, db_manager + ) + + # All appointments should have been deleted + assert len(appointments) == 0 + + # Make sure that all appointments are flagged as triggered in the db + db_appointments = db_manager.load_appointments_db(prefix=WATCHER_PREFIX) + for uuid in uuids: + assert db_appointments[uuid]["triggered"] is True + + +def test_delete_completed_jobs_db_match(db_manager): height = 0 for _ in range(ITERATIONS): - jobs, tx_job_map = set_up_jobs(MAX_ITEMS) + jobs, tx_job_map = set_up_jobs(db_manager, MAX_ITEMS) selected_jobs = random.sample(list(jobs.keys()), k=ITEMS) completed_jobs = [(job, 6) for job in selected_jobs] @@ -81,3 +116,36 @@ def test_delete_completed_jobs(db_manager): Cleaner.delete_completed_jobs(jobs, tx_job_map, completed_jobs, height, db_manager) assert not set(completed_jobs).issubset(jobs.keys()) + + +def test_delete_completed_jobs_no_db_match(db_manager): + height = 0 + + for _ in range(ITERATIONS): + jobs, tx_job_map = set_up_jobs(db_manager, MAX_ITEMS) + selected_jobs = random.sample(list(jobs.keys()), k=ITEMS) + + # Let's change some uuid's by creating new jobs that are not included in the db and share a justice_txid with + # another job that is stored in the db. 
+ for uuid in selected_jobs[: ITEMS // 2]: + justice_txid = jobs[uuid].justice_txid + new_uuid = uuid4().hex + + jobs[new_uuid] = Job(justice_txid, justice_txid, None, None) + tx_job_map[justice_txid].append(new_uuid) + selected_jobs.append(new_uuid) + + # Let's add some random data + for i in range(ITEMS // 2): + uuid = uuid4().hex + txid = get_random_value_hex(32) + + jobs[uuid] = Job(txid, txid, None, None) + tx_job_map[txid] = [uuid] + selected_jobs.append(uuid) + + completed_jobs = [(job, 6) for job in selected_jobs] + + # We should be able to delete the correct ones and not fail in the others + Cleaner.delete_completed_jobs(jobs, tx_job_map, completed_jobs, height, db_manager) + assert not set(completed_jobs).issubset(jobs.keys()) From f81cb04b74c50a05cc62bcd3311a34b57211570b Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 11 Nov 2019 14:03:18 +0000 Subject: [PATCH 19/36] Fixes get_distance_to_tip --- pisa/block_processor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pisa/block_processor.py b/pisa/block_processor.py index c44ead1..e75895d 100644 --- a/pisa/block_processor.py +++ b/pisa/block_processor.py @@ -72,7 +72,7 @@ class BlockProcessor: chain_tip = self.get_best_block_hash() chain_tip_height = self.get_block(chain_tip).get("height") - target_block = self.get_block(target_block_hash).get("height") + target_block = self.get_block(target_block_hash) if target_block is not None: target_block_height = target_block.get("height") From c54d10f92cc3529b08b26360d6a0293be6c879e8 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 11 Nov 2019 17:50:28 +0000 Subject: [PATCH 20/36] Fixes `Responder` There were a couple of things wrong in the `Responder`: - `check_confirmations` included in 8a10979db09b854af41e2e6f1dc99fc498ed7707 was not doing a good job keeping track of the missed confirmations, probably due to an overlooked refactor. - There was an edge case when adding txs to `unconfirmed_txs` on `create_job` that could lead to a transaction being confirmed and unconfirmed at the same time from the `Responder`'s point of view. It could be triggered by adding two jobs with the same justice_txid (see the sketch below).
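To illustrate the second point, here is a minimal plain-Python sketch of the new guard (illustration only, not part of the diff below):

    # Two jobs sharing the same justice_txid should only queue the txid once for confirmation tracking
    unconfirmed_txs = []
    justice_txid = "ff" * 32
    for _ in range(2):
        # Guard introduced by this patch: skip txids that are already being tracked
        if justice_txid not in unconfirmed_txs:
            unconfirmed_txs.append(justice_txid)
    assert unconfirmed_txs.count(justice_txid) == 1

Without the guard the txid would be appended twice, so a single confirmation would only remove one copy and the transaction would look both confirmed and unconfirmed to the `Responder`.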
--- pisa/responder.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/pisa/responder.py b/pisa/responder.py index ecd516b..cdbb416 100644 --- a/pisa/responder.py +++ b/pisa/responder.py @@ -113,7 +113,8 @@ class Responder: else: self.tx_job_map[justice_txid] = [uuid] - if confirmations == 0: + # In the case we receive two jobs with the same justice txid we only add it to the unconfirmed txs list once + if justice_txid not in self.unconfirmed_txs and confirmations == 0: self.unconfirmed_txs.append(justice_txid) self.db_manager.store_responder_job(uuid, job.to_json()) @@ -145,7 +146,6 @@ class Responder: if block is not None: txs = block.get("tx") - height = block.get("height") logger.info( "New block received", block_hash=block_hash, prev_block_hash=block.get("previousblockhash"), txs=txs @@ -155,6 +155,7 @@ class Responder: if prev_block_hash == block.get("previousblockhash"): self.check_confirmations(txs) + height = block.get("height") txs_to_rebroadcast = self.get_txs_to_rebroadcast(txs) completed_jobs = self.get_completed_jobs(height) @@ -184,25 +185,25 @@ class Responder: logger.info("No more pending jobs, going back to sleep") - # NOTCOVERED def check_confirmations(self, txs): - + # If a new confirmed tx matches a tx we are watching, then we remove it from the unconfirmed txs map for tx in txs: + print(tx, tx in self.tx_job_map and tx in self.unconfirmed_txs) if tx in self.tx_job_map and tx in self.unconfirmed_txs: self.unconfirmed_txs.remove(tx) logger.info("Confirmation received for transaction", tx=tx) - elif tx in self.unconfirmed_txs: - if tx in self.missed_confirmations: - self.missed_confirmations[tx] += 1 + # We also add a missing confirmation to all those txs waiting to be confirmed that have not been confirmed in + # the current block + for tx in self.unconfirmed_txs: + if tx in self.missed_confirmations: + self.missed_confirmations[tx] += 1 - else: - self.missed_confirmations[tx] = 1 + else: + self.missed_confirmations[tx] = 1 - logger.info( - "Transaction missed a confirmation", tx=tx, missed_confirmations=self.missed_confirmations[tx] - ) + logger.info("Transaction missed a confirmation", tx=tx, missed_confirmations=self.missed_confirmations[tx]) def get_txs_to_rebroadcast(self, txs): txs_to_rebroadcast = [] From f65e2af675582e7211424117efc3284d4754210c Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 14 Nov 2019 15:03:03 +0000 Subject: [PATCH 21/36] Deletes missed print --- pisa/responder.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pisa/responder.py b/pisa/responder.py index cdbb416..fee87aa 100644 --- a/pisa/responder.py +++ b/pisa/responder.py @@ -188,7 +188,6 @@ class Responder: def check_confirmations(self, txs): # If a new confirmed tx matches a tx we are watching, then we remove it from the unconfirmed txs map for tx in txs: - print(tx, tx in self.tx_job_map and tx in self.unconfirmed_txs) if tx in self.tx_job_map and tx in self.unconfirmed_txs: self.unconfirmed_txs.remove(tx) From ca3f6ee1b11d1388cfae4679e2e3c6ce48b8c6e3 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 14 Nov 2019 15:04:55 +0000 Subject: [PATCH 22/36] Fixes memory leak bug regarding test db --- test/unit/conftest.py | 6 ++---- test/unit/test_db_manager.py | 15 ++++++++++++++- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/test/unit/conftest.py b/test/unit/conftest.py index b76a367..df4f172 100644 --- a/test/unit/conftest.py +++ b/test/unit/conftest.py @@ -7,7 +7,6 @@ from threading import Thread from 
hashlib import sha256 from binascii import unhexlify -from pisa.conf import DB_PATH from apps.cli.blob import Blob from pisa.api import start_api from pisa.responder import Job @@ -31,8 +30,7 @@ def run_bitcoind(): @pytest.fixture(scope="session") -def run_api(): - db_manager = DBManager(DB_PATH) +def run_api(db_manager): watcher = Watcher(db_manager) api_thread = Thread(target=start_api, args=[watcher]) @@ -48,7 +46,7 @@ def prng_seed(): random.seed(0) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def db_manager(): manager = DBManager("test_db") yield manager diff --git a/test/unit/test_db_manager.py b/test/unit/test_db_manager.py index 28b7ba3..3507f5d 100644 --- a/test/unit/test_db_manager.py +++ b/test/unit/test_db_manager.py @@ -80,8 +80,18 @@ def test_load_appointments_db(db_manager): assert set(values) == set(local_appointments.values()) and (len(values) == len(local_appointments)) -def test_get_last_known_block(db_manager): +def test_get_last_known_block(): + db_path = "empty_db" + + # First we check if the db exists, and if so we delete it + if os.path.isdir(db_path): + shutil.rmtree(db_path) + + # Check that the db can be created if it does not exist + db_manager = open_create_db(db_path) + # Trying to get any last block for either the watcher or the responder should return None for an empty db + for key in [WATCHER_LAST_BLOCK_KEY, RESPONDER_LAST_BLOCK_KEY]: assert db_manager.get_last_known_block(key) is None @@ -91,6 +101,9 @@ def test_get_last_known_block(db_manager): db_manager.db.put(key.encode("utf-8"), block_hash.encode("utf-8")) assert db_manager.get_last_known_block(key) == block_hash + # Removing test db + shutil.rmtree(db_path) + def test_create_entry(db_manager): key = get_random_value_hex(32) From 1d1efc9ccf291d65305a1818d6cd9a1d78f69275 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 11 Nov 2019 17:57:03 +0000 Subject: [PATCH 23/36] Adds some missing Responder unit tests Excluding the basic reorg handling, we need a better bitcoind mock or a different testing approach to reach a higher code coverage.
This is as far as it can get for the Responder at the moment --- test/unit/test_responder.py | 141 ++++++++++++++++++++++++++++++++++-- 1 file changed, 135 insertions(+), 6 deletions(-) diff --git a/test/unit/test_responder.py b/test/unit/test_responder.py index 592a7f6..d8692bf 100644 --- a/test/unit/test_responder.py +++ b/test/unit/test_responder.py @@ -1,14 +1,17 @@ import json import pytest +import random from uuid import uuid4 +from copy import deepcopy from threading import Thread from queue import Queue, Empty from pisa import c_logger -from pisa.tools import check_txid_format, bitcoin_cli from test.simulator.utils import sha256d from pisa.responder import Responder, Job from test.simulator.bitcoind_sim import TX +from pisa.block_processor import BlockProcessor +from pisa.tools import check_txid_format, bitcoin_cli from test.unit.conftest import generate_block, generate_blocks, get_random_value_hex c_logger.disabled = True @@ -64,6 +67,23 @@ def test_job_init(run_bitcoind): ) +def test_on_sync(run_bitcoind, responder): + # We're on sync if we're 1 or less blocks behind the tip + chain_tip = BlockProcessor.get_best_block_hash() + assert Responder.on_sync(chain_tip) is True + + generate_block() + assert Responder.on_sync(chain_tip) is True + + +def test_on_sync_fail(responder): + # This should fail if we're more than 1 block behind the tip + chain_tip = BlockProcessor.get_best_block_hash() + generate_blocks(2) + + assert Responder.on_sync(chain_tip) is False + + def test_job_to_dict(): job = create_dummy_job() job_dict = job.to_dict() @@ -86,6 +106,28 @@ def test_job_to_json(): ) +def test_job_from_dict(): + job_dict = create_dummy_job().to_dict() + new_job = Job.from_dict(job_dict) + + assert job_dict == new_job.to_dict() + + +def test_job_from_dict_invalid_data(): + job_dict = create_dummy_job().to_dict() + + for value in ["dispute_txid", "justice_txid", "justice_rawtx", "appointment_end"]: + job_dict_copy = deepcopy(job_dict) + job_dict_copy[value] = None + + try: + Job.from_dict(job_dict_copy) + assert False + + except ValueError: + assert True + + def test_init_responder(responder): assert type(responder.jobs) is dict and len(responder.jobs) == 0 assert type(responder.tx_job_map) is dict and len(responder.tx_job_map) == 0 @@ -96,14 +138,11 @@ def test_init_responder(responder): assert responder.zmq_subscriber is None -def test_add_response(responder): +def test_add_response(db_manager): + responder = Responder(db_manager) uuid = uuid4().hex job = create_dummy_job() - # The responder automatically fires create_job on adding a job if it is asleep (initial state). Avoid this by - # setting the state to awake. - responder.asleep = False - # The block_hash passed to add_response does not matter much now. It will in the future to deal with errors receipt = responder.add_response( uuid, @@ -116,6 +155,36 @@ def test_add_response(responder): assert receipt.delivered is True + # The responder automatically fires create_job on adding a job if it is asleep. We need to stop the processes now. + # To do so we delete all the jobs, stop the zmq and create a new fake block to unblock the queue.get method + responder.jobs = dict() + responder.zmq_subscriber.terminate = True + responder.block_queue.put(get_random_value_hex(32)) + + +def test_add_bad_response(responder): + uuid = uuid4().hex + job = create_dummy_job() + + # Now that the asleep / awake functionality has been tested we can avoid manually killing the responder by setting + # to awake. 
That will prevent the zmq thread to be launched again. + responder.asleep = False + + # A txid instead of a rawtx should be enough for unit tests using the bitcoind mock, better tests are needed though. + job.justice_rawtx = job.justice_txid + + # The block_hash passed to add_response does not matter much now. It will in the future to deal with errors + receipt = responder.add_response( + uuid, + job.dispute_txid, + job.justice_txid, + job.justice_rawtx, + job.appointment_end, + block_hash=get_random_value_hex(32), + ) + + assert receipt.delivered is False + def test_create_job(responder): responder.asleep = False @@ -147,6 +216,33 @@ def test_create_job(responder): ) +def test_create_job_same_justice_txid(responder): + # Create the same job using two different uuids + confirmations = 0 + dispute_txid, justice_txid, justice_rawtx, appointment_end = create_dummy_job_data(random_txid=True) + uuid_1 = uuid4().hex + uuid_2 = uuid4().hex + + responder.create_job(uuid_1, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations) + responder.create_job(uuid_2, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations) + + # Check that both jobs have been added + assert uuid_1 in responder.jobs and uuid_2 in responder.jobs + assert justice_txid in responder.tx_job_map + assert justice_txid in responder.unconfirmed_txs + + # Check that the rest of job data also matches + for uuid in [uuid_1, uuid_2]: + job = responder.jobs[uuid] + assert ( + job.dispute_txid == dispute_txid + and job.justice_txid == justice_txid + and job.justice_rawtx == justice_rawtx + and job.appointment_end == appointment_end + and job.appointment_end == appointment_end + ) + + def test_create_job_already_confirmed(responder): responder.asleep = False @@ -233,6 +329,39 @@ def test_do_watch(responder): assert responder.asleep is True +def test_check_confirmations(responder): + # Reinitializing responder (but keeping the subscriber) + responder.jobs = dict() + responder.tx_job_map = dict() + responder.unconfirmed_txs = [] + responder.missed_confirmations = dict() + + # check_confirmations checks, given a list of transaction for a block, what of the known justice transaction have + # been confirmed. 
To test this we need to create a list of transactions and the state of the responder + txs = [get_random_value_hex(32) for _ in range(20)] + + # The responder has a list of unconfirmed transaction, let make that some of them are the ones we've received + responder.unconfirmed_txs = [get_random_value_hex(32) for _ in range(10)] + txs_subset = random.sample(txs, k=10) + responder.unconfirmed_txs.extend(txs_subset) + + # We also need to add them to the tx_job_map since they would be there in normal conditions + responder.tx_job_map = {txid: Job(txid, None, None, None) for txid in responder.unconfirmed_txs} + + # Let's make sure that there are no txs with missed confirmations yet + assert len(responder.missed_confirmations) == 0 + + responder.check_confirmations(txs) + + # After checking confirmations the txs in txs_subset should be confirmed (not part of unconfirmed_txs anymore) + # and the rest should have a missing confirmation + for tx in txs_subset: + assert tx not in responder.unconfirmed_txs + + for tx in responder.unconfirmed_txs: + assert responder.missed_confirmations[tx] == 1 + + def test_get_txs_to_rebroadcast(responder): # Let's create a few fake txids and assign at least 6 missing confirmations to each txs_missing_too_many_conf = {get_random_value_hex(32): 6 + i for i in range(10)} From f33d2d61acef30409b88b3d58a73b9087f0060f8 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 14 Nov 2019 15:40:02 +0000 Subject: [PATCH 24/36] Moves start api to test_api since it was the only place where it's used --- test/unit/conftest.py | 14 -------------- test/unit/test_api.py | 20 ++++++++++++++++++-- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/test/unit/conftest.py b/test/unit/conftest.py index df4f172..1f2810c 100644 --- a/test/unit/conftest.py +++ b/test/unit/conftest.py @@ -8,9 +8,7 @@ from hashlib import sha256 from binascii import unhexlify from apps.cli.blob import Blob -from pisa.api import start_api from pisa.responder import Job -from pisa.watcher import Watcher from pisa.tools import bitcoin_cli from pisa.db_manager import DBManager from pisa.appointment import Appointment @@ -29,18 +27,6 @@ def run_bitcoind(): sleep(0.1) -@pytest.fixture(scope="session") -def run_api(db_manager): - watcher = Watcher(db_manager) - - api_thread = Thread(target=start_api, args=[watcher]) - api_thread.daemon = True - api_thread.start() - - # It takes a little bit of time to start the API (otherwise the requests are sent too early and they fail) - sleep(0.1) - - @pytest.fixture(scope="session", autouse=True) def prng_seed(): random.seed(0) diff --git a/test/unit/test_api.py b/test/unit/test_api.py index 8872811..b51ef07 100644 --- a/test/unit/test_api.py +++ b/test/unit/test_api.py @@ -1,11 +1,15 @@ import json import pytest import requests +from time import sleep +from threading import Thread -from pisa import HOST, PORT, c_logger +from pisa.api import start_api +from pisa.watcher import Watcher from pisa.tools import bitcoin_cli -from test.unit.conftest import generate_blocks, get_random_value_hex, generate_dummy_appointment_data +from pisa import HOST, PORT, c_logger from pisa.conf import MAX_APPOINTMENTS +from test.unit.conftest import generate_blocks, get_random_value_hex, generate_dummy_appointment_data c_logger.disabled = True @@ -16,6 +20,18 @@ appointments = [] locator_dispute_tx_map = {} +@pytest.fixture(scope="module") +def run_api(db_manager): + watcher = Watcher(db_manager) + + api_thread = Thread(target=start_api, args=[watcher]) + api_thread.daemon = True 
+ api_thread.start() + + # It takes a little bit of time to start the API (otherwise the requests are sent too early and they fail) + sleep(0.1) + + @pytest.fixture def new_appointment(): appointment, dispute_tx = generate_dummy_appointment_data() From 3a1bf0cc8a257a66caebef4b412be4adb4ad7ce7 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Thu, 14 Nov 2019 17:35:52 +0000 Subject: [PATCH 25/36] Updates API to use DB on get methods --- pisa/api.py | 38 +++++++++++++++++--------------------- pisa/db_manager.py | 11 +++++++++++ 2 files changed, 28 insertions(+), 21 deletions(-) diff --git a/pisa/api.py b/pisa/api.py index 562386f..131ce43 100644 --- a/pisa/api.py +++ b/pisa/api.py @@ -78,24 +78,26 @@ def get_appointment(): # ToDo: #15-add-system-monitor - appointment_in_watcher = watcher.locator_uuid_map.get(locator) + locator_map = watcher.db_manager.load_locator_map(locator) - if appointment_in_watcher: - for uuid in appointment_in_watcher: - appointment_data = watcher.appointments[uuid].to_dict() - appointment_data["status"] = "being_watched" - response.append(appointment_data) + if locator_map is not None: + for uuid in locator_map: + appointment_data = watcher.db_manager.load_watcher_appointment(uuid) - if watcher.responder: - responder_jobs = watcher.responder.jobs + if appointment_data is not None and appointment_data["triggered"] is False: + # Triggered is an internal flag that does not need to be send + del appointment_data["triggered"] - for job in responder_jobs.values(): - if job.locator == locator: - job_data = job.to_dict() + appointment_data["status"] = "being_watched" + response.append(appointment_data) + + job_data = watcher.db_manager.load_responder_job(uuid) + + if job_data is not None: job_data["status"] = "dispute_responded" response.append(job_data) - if not response: + else: response.append({"locator": locator, "status": "not_found"}) response = jsonify(response) @@ -105,18 +107,12 @@ def get_appointment(): @app.route("/get_all_appointments", methods=["GET"]) def get_all_appointments(): - watcher_appointments = {} - responder_jobs = {} - # ToDo: #15-add-system-monitor + response = None if request.remote_addr in request.host or request.remote_addr == "127.0.0.1": - for uuid, appointment in watcher.appointments.items(): - watcher_appointments[uuid] = appointment.to_dict() - - if watcher.responder: - for uuid, job in watcher.responder.jobs.items(): - responder_jobs[uuid] = job.to_dict() + watcher_appointments = watcher.db_manager.load_watcher_appointments() + responder_jobs = watcher.db_manager.load_responder_jobs() response = jsonify({"watcher_appointments": watcher_appointments, "responder_jobs": responder_jobs}) diff --git a/pisa/db_manager.py b/pisa/db_manager.py index d7ce42c..0b3d40a 100644 --- a/pisa/db_manager.py +++ b/pisa/db_manager.py @@ -52,6 +52,11 @@ class DBManager: self.db.put(key, value) + def load_entry(self, key): + data = self.db.get(key.encode("utf-8")) + data = json.loads(data) if data is not None else data + return data + def delete_entry(self, key, prefix=None): if isinstance(prefix, str): key = prefix + key @@ -60,6 +65,12 @@ class DBManager: self.db.delete(key) + def load_watcher_appointment(self, key): + return self.load_entry(WATCHER_PREFIX + key) + + def load_responder_job(self, key): + return self.load_entry(RESPONDER_PREFIX + key) + def load_watcher_appointments(self): all_appointments = self.load_appointments_db(prefix=WATCHER_PREFIX) non_triggered_appointments = { From 6ff580ed8ff2125bb857442d9298d9e4622a8188 Mon Sep 17 00:00:00 
2001 From: Sergi Delgado Segura Date: Thu, 14 Nov 2019 17:36:26 +0000 Subject: [PATCH 26/36] Updates test_api according to 1b78060 and add missing cases --- test/unit/test_api.py | 74 +++++++++++++++++++++++++++---------------- 1 file changed, 47 insertions(+), 27 deletions(-) diff --git a/test/unit/test_api.py b/test/unit/test_api.py index b51ef07..5902c5d 100644 --- a/test/unit/test_api.py +++ b/test/unit/test_api.py @@ -9,7 +9,7 @@ from pisa.watcher import Watcher from pisa.tools import bitcoin_cli from pisa import HOST, PORT, c_logger from pisa.conf import MAX_APPOINTMENTS -from test.unit.conftest import generate_blocks, get_random_value_hex, generate_dummy_appointment_data +from test.unit.conftest import generate_block, generate_blocks, get_random_value_hex, generate_dummy_appointment_data c_logger.disabled = True @@ -36,6 +36,7 @@ def run_api(db_manager): def new_appointment(): appointment, dispute_tx = generate_dummy_appointment_data() locator_dispute_tx_map[appointment["locator"]] = dispute_tx + del appointment["triggered"] return appointment @@ -60,28 +61,6 @@ def test_add_appointment(run_api, run_bitcoind, new_appointment): assert r.status_code == 400 -def test_request_appointment(new_appointment): - # First we need to add an appointment - r = add_appointment(new_appointment) - assert r.status_code == 200 - - # Next we can request it - r = requests.get(url=PISA_API + "/get_appointment?locator=" + new_appointment["locator"]) - assert r.status_code == 200 - - # Each locator may point to multiple appointments, check them all - received_appointments = json.loads(r.content) - - # Take the status out and leave the received appointments ready to compare - appointment_status = [appointment.pop("status") for appointment in received_appointments] - - # Check that the appointment is within the received appoints - assert new_appointment in received_appointments - - # Check that all the appointments are being watched - assert all([status == "being_watched" for status in appointment_status]) - - def test_request_random_appointment(): r = requests.get(url=PISA_API + "/get_appointment?locator=" + get_random_value_hex(32)) assert r.status_code == 200 @@ -105,7 +84,7 @@ def test_request_multiple_appointments_same_locator(new_appointment, n=MULTIPLE_ r = add_appointment(new_appointment) assert r.status_code == 200 - test_request_appointment(new_appointment) + test_request_appointment_watcher(new_appointment) def test_add_too_many_appointment(new_appointment): @@ -149,8 +128,49 @@ def test_get_all_appointments_responder(): responder_jobs = [v["locator"] for k, v in received_appointments["responder_jobs"].items()] local_locators = [appointment["locator"] for appointment in appointments] - watcher_appointments = [v["locator"] for k, v in received_appointments["watcher_appointments"].items()] - print(set(watcher_appointments) == set(local_locators)) - assert set(responder_jobs) == set(local_locators) assert len(received_appointments["watcher_appointments"]) == 0 + + +def test_request_appointment_watcher(new_appointment): + # First we need to add an appointment + r = add_appointment(new_appointment) + assert r.status_code == 200 + + # Next we can request it + r = requests.get(url=PISA_API + "/get_appointment?locator=" + new_appointment["locator"]) + assert r.status_code == 200 + + # Each locator may point to multiple appointments, check them all + received_appointments = json.loads(r.content) + + # Take the status out and leave the received appointments ready to compare + appointment_status = 
[appointment.pop("status") for appointment in received_appointments] + + # Check that the appointment is within the received appoints + assert new_appointment in received_appointments + + # Check that all the appointments are being watched + assert all([status == "being_watched" for status in appointment_status]) + + +def test_request_appointment_responder(new_appointment): + # Let's do something similar to what we did with the watcher but now we'll send the dispute tx to the network + dispute_tx = locator_dispute_tx_map[new_appointment["locator"]] + bitcoin_cli().sendrawtransaction(dispute_tx) + + r = add_appointment(new_appointment) + assert r.status_code == 200 + + # Generate a block to trigger the watcher + generate_block() + + r = requests.get(url=PISA_API + "/get_appointment?locator=" + new_appointment["locator"]) + assert r.status_code == 200 + + received_appointments = json.loads(r.content) + appointment_status = [appointment.pop("status") for appointment in received_appointments] + appointment_locators = [appointment["locator"] for appointment in received_appointments] + + assert new_appointment["locator"] in appointment_locators and len(received_appointments) == 1 + assert all([status == "dispute_responded" for status in appointment_status]) and len(appointment_status) == 1 From a1c0eeadbccfc653247ab89c94b637855ce9cbe3 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 18 Nov 2019 15:25:15 +0000 Subject: [PATCH 27/36] Add missing EncryptedBlob tests --- test/unit/test_encrypted_blob.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/test/unit/test_encrypted_blob.py b/test/unit/test_encrypted_blob.py index 33b4ece..98119c4 100644 --- a/test/unit/test_encrypted_blob.py +++ b/test/unit/test_encrypted_blob.py @@ -11,6 +11,24 @@ def test_init_encrypted_blob(): assert EncryptedBlob(data).data == data +def test_init_encrypted_blob_wrong_cipher(): + try: + EncryptedBlob(get_random_value_hex(64), cipher="") + assert False + + except ValueError: + assert True + + +def test_init_encrypted_blob_wrong_hash_function(): + try: + EncryptedBlob(get_random_value_hex(64), hash_function="") + assert False + + except ValueError: + assert True + + def test_equal(): data = get_random_value_hex(64) e_blob1 = EncryptedBlob(data) From 764e513ab96cade911475e2ebf5c296254f8327d Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 18 Nov 2019 16:39:23 +0000 Subject: [PATCH 28/36] Replaces asserts for Exceptions in simulator --- test/simulator/utils.py | 11 ++++++++--- test/unit/test_block_processor.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/test/simulator/utils.py b/test/simulator/utils.py index 3df996f..dce3f7c 100644 --- a/test/simulator/utils.py +++ b/test/simulator/utils.py @@ -31,9 +31,14 @@ def parse_varint(tx): # First of all, the offset of the hex transaction if moved to the proper position (i.e where the varint should be # located) and the length and format of the data to be analyzed is checked. data = tx.hex[tx.offset :] - assert len(data) > 0 - size = int(data[:2], 16) - assert size <= 255 + if len(data) > 0: + size = int(data[:2], 16) + + else: + raise ValueError("No data to be parsed") + + if size > 255: + raise ValueError("Wrong value (varint size > 255)") # Then, the integer is encoded as a varint using the proper prefix, if needed. 
if size <= 252: # No prefix diff --git a/test/unit/test_block_processor.py b/test/unit/test_block_processor.py index ab16736..69c03fe 100644 --- a/test/unit/test_block_processor.py +++ b/test/unit/test_block_processor.py @@ -2,6 +2,7 @@ import pytest from pisa import c_logger from pisa.block_processor import BlockProcessor +from pisa.utils.auth_proxy import JSONRPCException from test.unit.conftest import get_random_value_hex c_logger.disabled = True @@ -36,3 +37,33 @@ def test_get_random_block(): def test_get_block_count(): block_count = BlockProcessor.get_block_count() assert isinstance(block_count, int) and block_count >= 0 + + +def test_decode_raw_transaction(): + # We cannot exhaustively test this (we rely on bitcoind for this) but we can try to decode a correct transaction + hex_tx = ( + "0100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402" + "204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4" + "acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b" + "13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1ba" + "ded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482e" + "cad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000" + ) + + try: + BlockProcessor.decode_raw_transaction(hex_tx) + assert True + except JSONRPCException: + assert False + + +def test_decode_raw_transaction_invalid(): + # Same but with an invalid one + + hex_tx = "A" * 16 + + try: + BlockProcessor.decode_raw_transaction(hex_tx) + assert False + except JSONRPCException: + assert True From 287dfeeee3edc1d379318c917a3adf8fd4f1351c Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Mon, 18 Nov 2019 16:39:50 +0000 Subject: [PATCH 29/36] Adds missing test on BlockProessor --- test/unit/test_block_processor.py | 66 ++++++++++++++++++++----------- 1 file changed, 44 insertions(+), 22 deletions(-) diff --git a/test/unit/test_block_processor.py b/test/unit/test_block_processor.py index 69c03fe..972d42c 100644 --- a/test/unit/test_block_processor.py +++ b/test/unit/test_block_processor.py @@ -2,11 +2,19 @@ import pytest from pisa import c_logger from pisa.block_processor import BlockProcessor -from pisa.utils.auth_proxy import JSONRPCException -from test.unit.conftest import get_random_value_hex +from test.unit.conftest import get_random_value_hex, generate_block, generate_blocks c_logger.disabled = True +hex_tx = ( + "0100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402" + "204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4" + "acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b" + "13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1ba" + "ded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482e" + "cad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000" +) + @pytest.fixture def best_block_hash(): @@ -41,29 +49,43 @@ def test_get_block_count(): def test_decode_raw_transaction(): # We cannot exhaustively test this (we rely on bitcoind for this) but we can try to decode a correct transaction - hex_tx = ( - "0100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402" - 
"204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4" - "acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b" - "13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1ba" - "ded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482e" - "cad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000" - ) - - try: - BlockProcessor.decode_raw_transaction(hex_tx) - assert True - except JSONRPCException: - assert False + assert BlockProcessor.decode_raw_transaction(hex_tx) is not None def test_decode_raw_transaction_invalid(): # Same but with an invalid one + assert BlockProcessor.decode_raw_transaction(hex_tx[::-1]) is None - hex_tx = "A" * 16 - try: - BlockProcessor.decode_raw_transaction(hex_tx) - assert False - except JSONRPCException: - assert True +def test_get_missed_blocks(): + block_processor = BlockProcessor() + target_block = block_processor.get_best_block_hash() + + # Generate some blocks and store the hash in a list + missed_blocks = [] + for _ in range(5): + generate_block() + missed_blocks.append(BlockProcessor.get_best_block_hash()) + + # Check what we've missed + assert block_processor.get_missed_blocks(target_block) == missed_blocks + + # We can see how it does not work if we replace the target by the first element in the list + block_tip = missed_blocks[0] + assert block_processor.get_missed_blocks(block_tip) != missed_blocks + + # But it does again if we skip that block + assert block_processor.get_missed_blocks(block_tip) == missed_blocks[1:] + + +def test_get_distance_to_tip(): + target_distance = 5 + + block_processor = BlockProcessor() + target_block = block_processor.get_best_block_hash() + + # Mine some blocks up to the target distance + generate_blocks(target_distance) + + # Check if the distance is properly computed + assert block_processor.get_distance_to_tip(target_block) == target_distance From f91413ebd8f6fe36daebc28e6847eb0aefda1435 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Wed, 20 Nov 2019 12:59:29 +0000 Subject: [PATCH 30/36] Properly handles mempool checks Transactions were included in the mempool as rawtxs but checked as txids, so effectively every time we checked if a transaction was in mempool it returned false --- test/simulator/bitcoind_sim.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/test/simulator/bitcoind_sim.py b/test/simulator/bitcoind_sim.py index f9f3319..a023950 100644 --- a/test/simulator/bitcoind_sim.py +++ b/test/simulator/bitcoind_sim.py @@ -3,6 +3,7 @@ import time import json import logging import binascii +from itertools import islice from threading import Thread, Event from flask import Flask, request, Response, abort @@ -19,7 +20,7 @@ PORT = "18443" blockchain = [] blocks = {} mined_transactions = {} -mempool = [] +mempool = {} mine_new_block = Event() @@ -138,7 +139,7 @@ def process_request(): if TX.deserialize(rawtx) is not None: if txid not in list(mined_transactions.keys()): - mempool.append(rawtx) + mempool[txid] = rawtx response["result"] = {"txid": txid} else: @@ -164,7 +165,7 @@ def process_request(): response["result"] = {"hex": rawtx, "confirmations": len(blockchain) - block.get("height")} elif txid in mempool: - response["result"] = {"confirmations": 0} + response["result"] = {"confirmations": None} else: response["error"] = { @@ -260,11 +261,9 @@ def simulate_mining(mode, time_between_blocks): if 
len(mempool) != 0: # We'll mine up to 100 txs per block - for rawtx in mempool[:99]: - txid = sha256d(rawtx) + for txid, rawtx in dict(islice(mempool.items(), 99)).items(): txs_to_mine[txid] = rawtx - - mempool = mempool[99:] + mempool.pop(txid) # Keep track of the mined transaction (to respond to getrawtransaction) for txid, tx in txs_to_mine.items(): From 05961f1632d902316dae5a6867a102a7c37c525a Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Wed, 20 Nov 2019 13:01:27 +0000 Subject: [PATCH 31/36] Moves tx_in_chain to Carrier Also integrates it properly so it uses self.get_transaction() instead of bitcoin_cli straightaway. Error messages have also been merged / modified --- pisa/carrier.py | 20 +++++++++++++++++++- pisa/tools.py | 30 ------------------------------ 2 files changed, 19 insertions(+), 31 deletions(-) diff --git a/pisa/carrier.py b/pisa/carrier.py index c89f656..ff2d8b9 100644 --- a/pisa/carrier.py +++ b/pisa/carrier.py @@ -80,10 +80,28 @@ class Carrier: # reorged while we were querying bitcoind to get the confirmation count. In such a case we just # restart the job if e.error.get("code") == RPC_INVALID_ADDRESS_OR_KEY: - logger.info("Transaction got reorged before obtaining information", txid=txid) + logger.info("Transaction not found in mempool nor blockchain", txid=txid) else: # If something else happens (unlikely but possible) log it so we can treat it in future releases logger.error("JSONRPCException.", method="Carrier.get_transaction", error=e.error) return tx_info + + def check_tx_in_chain(self, txid): + tx_in_chain = False + confirmations = None + + tx_info = self.get_transaction(txid) + + if tx_info is not None: + confirmations = int(tx_info.get("confirmations")) if tx_info.get("confirmations") is not None else None + + if confirmations is not None: + tx_in_chain = True + logger.error("Transaction found in the blockchain", txid=txid) + + else: + logger.error("Transaction found in mempool", txid=txid) + + return tx_in_chain, confirmations diff --git a/pisa/tools.py b/pisa/tools.py index d0f88f2..9222e0c 100644 --- a/pisa/tools.py +++ b/pisa/tools.py @@ -2,8 +2,6 @@ import re from http.client import HTTPException import pisa.conf as conf -from pisa.logger import Logger -from pisa.rpc_errors import RPC_INVALID_ADDRESS_OR_KEY from pisa.utils.auth_proxy import AuthServiceProxy, JSONRPCException @@ -14,34 +12,6 @@ def bitcoin_cli(): ) -# TODO: currently only used in the Responder; might move there or in the BlockProcessor -# NOTCOVERED -def check_tx_in_chain(tx_id, logger=Logger(), tx_label="Transaction"): - tx_in_chain = False - confirmations = 0 - - try: - tx_info = bitcoin_cli().getrawtransaction(tx_id, 1) - - if tx_info.get("confirmations"): - confirmations = int(tx_info.get("confirmations")) - tx_in_chain = True - logger.error("{} found in the blockchain".format(tx_label), txid=tx_id) - - else: - logger.error("{} found in mempool".format(tx_label), txid=tx_id) - - except JSONRPCException as e: - if e.error.get("code") == RPC_INVALID_ADDRESS_OR_KEY: - logger.error("{} not found in mempool nor blockchain".format(tx_label), txid=tx_id) - - else: - # ToDO: Unhandled errors, check this properly - logger.error("JSONRPCException.", method="tools.check_tx_in_chain", error=e.error) - - return tx_in_chain, confirmations - - # NOTCOVERED def can_connect_to_bitcoind(): can_connect = True From 3dad5b7c714c8ea22f33ba6e1d48aa8aba93eaa6 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Wed, 20 Nov 2019 13:01:56 +0000 Subject: [PATCH 32/36] Adds tests for 
check_tx_in_chain --- test/unit/test_carrier.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/test/unit/test_carrier.py b/test/unit/test_carrier.py index 20eeaf7..00636d0 100644 --- a/test/unit/test_carrier.py +++ b/test/unit/test_carrier.py @@ -4,7 +4,7 @@ from pisa import c_logger from pisa.carrier import Carrier from test.simulator.utils import sha256d from test.simulator.transaction import TX -from test.unit.conftest import generate_blocks, get_random_value_hex +from test.unit.conftest import generate_blocks, generate_block, get_random_value_hex from pisa.rpc_errors import RPC_VERIFY_ALREADY_IN_CHAIN, RPC_DESERIALIZATION_ERROR c_logger.disabled = True @@ -72,3 +72,24 @@ def test_get_non_existing_transaction(): tx_info = Carrier.get_transaction(get_random_value_hex(32)) assert tx_info is None + + +def test_check_tx_in_chain(carrier): + # Let's starts by looking for a random transaction + random_tx = TX.create_dummy_transaction() + random_txid = sha256d(random_tx) + tx_in_chain, confirmations = carrier.check_tx_in_chain(random_txid) + assert tx_in_chain is False and confirmations is None + + # We can now broadcast the transaction and check again + carrier.send_transaction(random_tx, random_txid) + tx_in_chain, confirmations = carrier.check_tx_in_chain(random_txid) + + # The tx should be on mempool now, so same + assert tx_in_chain is False and confirmations is None + + # Finally we can mine a block and check again + generate_block() + tx_in_chain, confirmations = carrier.check_tx_in_chain(random_txid) + + assert tx_in_chain is True and confirmations == 1 From 2183c57f53e3b6ade75bd7ccd9859adba67a5028 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Wed, 20 Nov 2019 15:32:04 +0000 Subject: [PATCH 33/36] Updates handle_reorgs and deletes check_tx_in_chain Updates handle_reorgs to work with the current version of the Responder (the old code was outdated and broken). `check_tx_in_chain` was only used by `handle_reorgs` and no longer made much sense at this point. The method needs to check whether the transaction is in the mempool or the blockchain, so it makes more sense to build it on top of `get_transaction`, as sketched below.
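Roughly, the idea is the following (sketch only; `classify_tx` is just an illustrative name, the actual logic lives in `handle_reorgs` in the diff below):

    def classify_tx(carrier, txid):
        # get_transaction returns None when bitcoind knows nothing about the txid,
        # a dict whose "confirmations" is missing/None while the tx sits in the mempool,
        # and a confirmation count once the tx has been included in a block
        tx_info = carrier.get_transaction(txid)
        if tx_info is None:
            return "not found in mempool nor blockchain"
        if tx_info.get("confirmations") is None:
            return "in mempool (unconfirmed)"
        return "confirmed on-chain"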
--- pisa/carrier.py | 18 -------------- pisa/responder.py | 51 ++++++++++++++++++++------------------- test/unit/test_carrier.py | 21 ---------------- 3 files changed, 26 insertions(+), 64 deletions(-) diff --git a/pisa/carrier.py b/pisa/carrier.py index ff2d8b9..627b917 100644 --- a/pisa/carrier.py +++ b/pisa/carrier.py @@ -87,21 +87,3 @@ class Carrier: logger.error("JSONRPCException.", method="Carrier.get_transaction", error=e.error) return tx_info - - def check_tx_in_chain(self, txid): - tx_in_chain = False - confirmations = None - - tx_info = self.get_transaction(txid) - - if tx_info is not None: - confirmations = int(tx_info.get("confirmations")) if tx_info.get("confirmations") is not None else None - - if confirmations is not None: - tx_in_chain = True - logger.error("Transaction found in the blockchain", txid=txid) - - else: - logger.error("Transaction found in mempool", txid=txid) - - return tx_in_chain, confirmations diff --git a/pisa/responder.py b/pisa/responder.py index fee87aa..2cc00ff 100644 --- a/pisa/responder.py +++ b/pisa/responder.py @@ -7,7 +7,6 @@ from binascii import unhexlify from pisa.logger import Logger from pisa.cleaner import Cleaner from pisa.carrier import Carrier -from pisa.tools import check_tx_in_chain from pisa.block_processor import BlockProcessor from pisa.utils.zmq_subscriber import ZMQHandler @@ -171,7 +170,7 @@ class Responder: ) # ToDo: #24-properly-handle-reorgs - self.handle_reorgs() + self.handle_reorgs(block_hash) # Register the last processed block for the responder self.db_manager.store_last_block_hash_responder(block_hash) @@ -262,41 +261,43 @@ class Responder: return receipts - # FIXME: Legacy code, must be checked and updated/fixed # NOTCOVERED - def handle_reorgs(self): + def handle_reorgs(self, block_hash): + carrier = Carrier() + for uuid, job in self.jobs.items(): - # First we check if the dispute transaction is still in the blockchain. If not, the justice can not be - # there either, so we'll need to call the reorg manager straight away - dispute_in_chain, _ = check_tx_in_chain(job.dispute_txid, logger=logger, tx_label="Dispute tx") + # First we check if the dispute transaction is known (exists either in mempool or blockchain) + dispute_tx = carrier.get_transaction(job.dispute_txid) - # If the dispute is there, we can check the justice tx - if dispute_in_chain: - justice_in_chain, justice_confirmations = check_tx_in_chain( - job.justice_txid, logger=logger, tx_label="Justice tx" - ) + if dispute_tx is not None: + # If the dispute is there, we check the justice + justice_tx = carrier.get_transaction(job.justice_txid) - # If both transactions are there, we only need to update the justice tx confirmation count - if justice_in_chain: - logger.info( - "Updating confirmation count for transaction.", - justice_txid=job.justice_txid, - prev_count=job.confirmations, - curr_count=justice_confirmations, - ) + if justice_tx is not None: + # If the justice exists we need to check is it's on the blockchain or not so we can update the + # unconfirmed transactions list accordingly. + if justice_tx.get("confirmations") is None: + self.unconfirmed_txs.append(job.justice_txid) - job.confirmations = justice_confirmations + logger.info( + "Justice transaction back in mempool. Updating unconfirmed transactions.", + justice_txid=job.justice_txid, + ) else: - # Otherwise, we will add the job back (implying rebroadcast of the tx) and monitor it again + # If the justice transaction is missing, we need to reset the job. 
# DISCUSS: Adding job back, should we flag it as retried? # FIXME: Whether we decide to increase the retried counter or not, the current counter should be # maintained. There is no way of doing so with the current approach. Update if required - self.add_response(uuid, job.dispute_txid, job.justice_txid, job.justice_rawtx, job.appointment_end) + self.add_response( + uuid, job.dispute_txid, job.justice_txid, job.justice_rawtx, job.appointment_end, block_hash + ) + + logger.warning("Justice transaction banished. Resetting the job", justice_tx=job.justice_txid) else: # ToDo: #24-properly-handle-reorgs # FIXME: if the dispute is not on chain (either in mempool or not there at all), we need to call the # reorg manager - logger.warning("Dispute and justice transaction missing. Calling the reorg manager") - logger.error("Reorg manager not yet implemented") + logger.warning("Dispute and justice transaction missing. Calling the reorg manager.") + logger.error("Reorg manager not yet implemented.") diff --git a/test/unit/test_carrier.py b/test/unit/test_carrier.py index 00636d0..4e6eddf 100644 --- a/test/unit/test_carrier.py +++ b/test/unit/test_carrier.py @@ -72,24 +72,3 @@ def test_get_non_existing_transaction(): tx_info = Carrier.get_transaction(get_random_value_hex(32)) assert tx_info is None - - -def test_check_tx_in_chain(carrier): - # Let's starts by looking for a random transaction - random_tx = TX.create_dummy_transaction() - random_txid = sha256d(random_tx) - tx_in_chain, confirmations = carrier.check_tx_in_chain(random_txid) - assert tx_in_chain is False and confirmations is None - - # We can now broadcast the transaction and check again - carrier.send_transaction(random_tx, random_txid) - tx_in_chain, confirmations = carrier.check_tx_in_chain(random_txid) - - # The tx should be on mempool now, so same - assert tx_in_chain is False and confirmations is None - - # Finally we can mine a block and check again - generate_block() - tx_in_chain, confirmations = carrier.check_tx_in_chain(random_txid) - - assert tx_in_chain is True and confirmations == 1 From d2b7216a96d5ebfc98781bf65a5b220463734006 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Fri, 22 Nov 2019 14:48:38 +0000 Subject: [PATCH 34/36] Addresses requested changes --- pisa/cryptographer.py | 2 +- test/unit/test_appointment.py | 15 ++++++++------ test/unit/test_cleaner.py | 19 ++++++++++-------- test/unit/test_responder.py | 38 ++++++++++++++++++++++++----------- test/unit/test_watcher.py | 4 ++-- 5 files changed, 49 insertions(+), 29 deletions(-) diff --git a/pisa/cryptographer.py b/pisa/cryptographer.py index efe493c..0dd507a 100644 --- a/pisa/cryptographer.py +++ b/pisa/cryptographer.py @@ -35,7 +35,7 @@ class Cryptographer: "Creating new blob.", master_key=hexlify(master_key).decode(), sk=hexlify(sk).decode(), - nonce=hexlify(sk).decode(), + nonce=hexlify(nonce).decode(), encrypted_blob=encrypted_blob.data, ) diff --git a/test/unit/test_appointment.py b/test/unit/test_appointment.py index e314d5e..329ef10 100644 --- a/test/unit/test_appointment.py +++ b/test/unit/test_appointment.py @@ -111,14 +111,17 @@ def test_from_dict(appointment_data): assert isinstance(appointment, Appointment) # Otherwise it should fail - appointment_data["hash_function"] = None + for key in appointment_data.keys(): + prev_val = appointment_data[key] + appointment_data[key] = None - try: - Appointment.from_dict(appointment_data) - assert False + try: + Appointment.from_dict(appointment_data) + assert False - except ValueError: - assert True + 
except ValueError: + appointment_data[key] = prev_val + assert True def test_serialize(appointment_data): diff --git a/test/unit/test_cleaner.py b/test/unit/test_cleaner.py index 6b22cd4..8b08df1 100644 --- a/test/unit/test_cleaner.py +++ b/test/unit/test_cleaner.py @@ -53,12 +53,13 @@ def set_up_jobs(db_manager, total_jobs): uuid = uuid4().hex # We use the same txid for justice and dispute here, it shouldn't matter - txid = get_random_value_hex(32) + justice_txid = get_random_value_hex(32) + dispute_txid = get_random_value_hex(32) # Assign both justice_txid and dispute_txid the same id (it shouldn't matter) - job = Job(txid, txid, None, None) + job = Job(dispute_txid, justice_txid, None, None) jobs[uuid] = job - tx_job_map[txid] = [uuid] + tx_job_map[justice_txid] = [uuid] db_manager.store_responder_job(uuid, job.to_json()) db_manager.store_update_locator_map(job.locator, uuid) @@ -68,7 +69,7 @@ def set_up_jobs(db_manager, total_jobs): uuid = uuid4().hex jobs[uuid] = job - tx_job_map[txid].append(uuid) + tx_job_map[justice_txid].append(uuid) db_manager.store_responder_job(uuid, job.to_json()) db_manager.store_update_locator_map(job.locator, uuid) @@ -129,19 +130,21 @@ def test_delete_completed_jobs_no_db_match(db_manager): # another job that is stored in the db. for uuid in selected_jobs[: ITEMS // 2]: justice_txid = jobs[uuid].justice_txid + dispute_txid = get_random_value_hex(32) new_uuid = uuid4().hex - jobs[new_uuid] = Job(justice_txid, justice_txid, None, None) + jobs[new_uuid] = Job(dispute_txid, justice_txid, None, None) tx_job_map[justice_txid].append(new_uuid) selected_jobs.append(new_uuid) # Let's add some random data for i in range(ITEMS // 2): uuid = uuid4().hex - txid = get_random_value_hex(32) + justice_txid = get_random_value_hex(32) + dispute_txid = get_random_value_hex(32) - jobs[uuid] = Job(txid, txid, None, None) - tx_job_map[txid] = [uuid] + jobs[uuid] = Job(dispute_txid, justice_txid, None, None) + tx_job_map[justice_txid] = [uuid] selected_jobs.append(uuid) completed_jobs = [(job, 6) for job in selected_jobs] diff --git a/test/unit/test_responder.py b/test/unit/test_responder.py index d8692bf..69fd8f3 100644 --- a/test/unit/test_responder.py +++ b/test/unit/test_responder.py @@ -2,11 +2,13 @@ import json import pytest import random from uuid import uuid4 +from shutil import rmtree from copy import deepcopy from threading import Thread from queue import Queue, Empty from pisa import c_logger +from pisa.db_manager import DBManager from test.simulator.utils import sha256d from pisa.responder import Responder, Job from test.simulator.bitcoind_sim import TX @@ -22,6 +24,16 @@ def responder(db_manager): return Responder(db_manager) +@pytest.fixture() +def temp_db_manager(): + db_name = get_random_value_hex(8) + db_manager = DBManager(db_name) + yield db_manager + + db_manager.db.close() + rmtree(db_name) + + def create_dummy_job_data(random_txid=False, justice_rawtx=None): # The following transaction data corresponds to a valid transaction. For some test it may be interesting to have # some valid data, but for others we may need multiple different justice_txids. 
@@ -274,12 +286,13 @@ def test_do_subscribe(responder): assert False -def test_do_watch(responder): - # Reinitializing responder (but keeping the subscriber) - responder.jobs = dict() - responder.tx_job_map = dict() - responder.unconfirmed_txs = [] - responder.missed_confirmations = dict() +def test_do_watch(temp_db_manager): + responder = Responder(temp_db_manager) + responder.block_queue = Queue() + + zmq_thread = Thread(target=responder.do_subscribe) + zmq_thread.daemon = True + zmq_thread.start() jobs = [create_dummy_job(justice_rawtx=TX.create_dummy_transaction()) for _ in range(20)] @@ -329,12 +342,13 @@ def test_do_watch(responder): assert responder.asleep is True -def test_check_confirmations(responder): - # Reinitializing responder (but keeping the subscriber) - responder.jobs = dict() - responder.tx_job_map = dict() - responder.unconfirmed_txs = [] - responder.missed_confirmations = dict() +def test_check_confirmations(temp_db_manager): + responder = Responder(temp_db_manager) + responder.block_queue = Queue() + + zmq_thread = Thread(target=responder.do_subscribe) + zmq_thread.daemon = True + zmq_thread.start() # check_confirmations checks, given a list of transaction for a block, what of the known justice transaction have # been confirmed. To test this we need to create a list of transactions and the state of the responder diff --git a/test/unit/test_watcher.py b/test/unit/test_watcher.py index bf5d7dd..7ab1ed1 100644 --- a/test/unit/test_watcher.py +++ b/test/unit/test_watcher.py @@ -186,7 +186,7 @@ def test_do_watch(watcher): assert watcher.asleep is True -def test_matches(watcher, txids, locator_uuid_map): +def test_get_matches(watcher, txids, locator_uuid_map): watcher.locator_uuid_map = locator_uuid_map potential_matches = watcher.get_matches(txids) @@ -194,7 +194,7 @@ def test_matches(watcher, txids, locator_uuid_map): assert locator_uuid_map.keys() == potential_matches.keys() -def test_matches_random_data(watcher, locator_uuid_map): +def test_get_matches_random_data(watcher, locator_uuid_map): # The likelihood of finding a potential match with random data should be negligible watcher.locator_uuid_map = locator_uuid_map txids = [get_random_value_hex(32) for _ in range(TEST_SET_SIZE)] From c663fab788aaa8f437464fcf4447fddc5aecb38d Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Wed, 4 Dec 2019 13:27:06 +0100 Subject: [PATCH 35/36] Return non found if the locator does not fit the proper format --- pisa/api.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pisa/api.py b/pisa/api.py index 131ce43..88cccf8 100644 --- a/pisa/api.py +++ b/pisa/api.py @@ -77,6 +77,9 @@ def get_appointment(): response = [] # ToDo: #15-add-system-monitor + if not isinstance(locator, str) or len(locator) != 64: + response.append({"locator": locator, "status": "not_found"}) + return jsonify(response) locator_map = watcher.db_manager.load_locator_map(locator) From 5e9211d56251eb3528ae7fe27356744f1ce32268 Mon Sep 17 00:00:00 2001 From: Sergi Delgado Segura Date: Wed, 4 Dec 2019 13:27:29 +0100 Subject: [PATCH 36/36] Some parameters were mixed up when boostraping from DB --- pisa/pisad.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pisa/pisad.py b/pisa/pisad.py index a408b70..ddabb7d 100644 --- a/pisa/pisad.py +++ b/pisa/pisad.py @@ -65,16 +65,16 @@ if __name__ == "__main__": missed_blocks_responder = ( missed_blocks_watcher if last_block_watcher == last_block_responder - else block_processor.get_missed_blocks(last_block_watcher) + else 
block_processor.get_missed_blocks(last_block_responder) ) responder = Responder(db_manager) responder.jobs, responder.tx_job_map = Builder.build_jobs(responder_jobs_data) - responder.block_queue = Builder.build_block_queue(last_block_responder) + responder.block_queue = Builder.build_block_queue(missed_blocks_responder) watcher.responder = responder watcher.appointments, watcher.locator_uuid_map = Builder.build_appointments(watcher_appointments_data) - watcher.block_queue = Builder.build_block_queue(last_block_responder) + watcher.block_queue = Builder.build_block_queue(missed_blocks_watcher) # Create an instance of the Watcher and fire the API start_api(watcher)