Addresses requested changes

Sergi Delgado Segura
2019-11-22 14:48:38 +00:00
parent 2183c57f53
commit d2b7216a96
5 changed files with 49 additions and 29 deletions


@@ -35,7 +35,7 @@ class Cryptographer:
"Creating new blob.", "Creating new blob.",
master_key=hexlify(master_key).decode(), master_key=hexlify(master_key).decode(),
sk=hexlify(sk).decode(), sk=hexlify(sk).decode(),
nonce=hexlify(sk).decode(), nonce=hexlify(nonce).decode(),
encrypted_blob=encrypted_blob.data, encrypted_blob=encrypted_blob.data,
) )
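The change above fixes a copy-paste slip: the log entry reported the secret key under the nonce field. A minimal illustration of the corrected field construction, using only the standard library (the 32- and 12-byte sizes are placeholders, not taken from this code):

from binascii import hexlify
from os import urandom

sk = urandom(32)     # placeholder secret key
nonce = urandom(12)  # placeholder nonce

log_fields = {
    "sk": hexlify(sk).decode(),
    "nonce": hexlify(nonce).decode(),  # previously this hexlified sk by mistake
}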


@@ -111,14 +111,17 @@ def test_from_dict(appointment_data):
     assert isinstance(appointment, Appointment)

     # Otherwise it should fail
-    appointment_data["hash_function"] = None
+    for key in appointment_data.keys():
+        prev_val = appointment_data[key]
+        appointment_data[key] = None

-    try:
-        Appointment.from_dict(appointment_data)
-        assert False
+        try:
+            Appointment.from_dict(appointment_data)
+            assert False

-    except ValueError:
-        assert True
+        except ValueError:
+            appointment_data[key] = prev_val
+            assert True


 def test_serialize(appointment_data):
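The rewritten test nulls each field in turn and expects `Appointment.from_dict` to raise `ValueError`. An equivalent, slightly more compact formulation, sketched here on the assumption that pytest is available as a test dependency (the test name and the copied dict are illustrative, not part of this commit):

import pytest

def test_from_dict_rejects_missing_fields(appointment_data):
    # Corrupt one field at a time and expect from_dict to reject the data.
    for key in appointment_data:
        corrupted = {**appointment_data, key: None}
        with pytest.raises(ValueError):
            Appointment.from_dict(corrupted)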


@@ -53,12 +53,13 @@ def set_up_jobs(db_manager, total_jobs):
         uuid = uuid4().hex

-        # We use the same txid for justice and dispute here, it shouldn't matter
-        txid = get_random_value_hex(32)
+        justice_txid = get_random_value_hex(32)
+        dispute_txid = get_random_value_hex(32)

         # Assign both justice_txid and dispute_txid the same id (it shouldn't matter)
-        job = Job(txid, txid, None, None)
+        job = Job(dispute_txid, justice_txid, None, None)
         jobs[uuid] = job
-        tx_job_map[txid] = [uuid]
+        tx_job_map[justice_txid] = [uuid]

         db_manager.store_responder_job(uuid, job.to_json())
         db_manager.store_update_locator_map(job.locator, uuid)
@@ -68,7 +69,7 @@ def set_up_jobs(db_manager, total_jobs):
         uuid = uuid4().hex
         jobs[uuid] = job
-        tx_job_map[txid].append(uuid)
+        tx_job_map[justice_txid].append(uuid)

         db_manager.store_responder_job(uuid, job.to_json())
         db_manager.store_update_locator_map(job.locator, uuid)
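For context, `tx_job_map` maps a justice txid to every job UUID that responds to it, which is why this second hunk appends to an existing list instead of overwriting it. A small sketch of the shape being built (the `track` helper is illustrative only):

from uuid import uuid4

tx_job_map = {}

def track(justice_txid, job_uuid):
    # Several jobs can share the same justice transaction, so each txid
    # keys a list of job UUIDs rather than a single value.
    tx_job_map.setdefault(justice_txid, []).append(job_uuid)

txid = "ab" * 32
track(txid, uuid4().hex)
track(txid, uuid4().hex)  # a second job for the same txid -> two UUIDs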
@@ -129,19 +130,21 @@ def test_delete_completed_jobs_no_db_match(db_manager):
     # another job that is stored in the db.
     for uuid in selected_jobs[: ITEMS // 2]:
         justice_txid = jobs[uuid].justice_txid
+        dispute_txid = get_random_value_hex(32)
         new_uuid = uuid4().hex

-        jobs[new_uuid] = Job(justice_txid, justice_txid, None, None)
+        jobs[new_uuid] = Job(dispute_txid, justice_txid, None, None)
         tx_job_map[justice_txid].append(new_uuid)
         selected_jobs.append(new_uuid)

     # Let's add some random data
     for i in range(ITEMS // 2):
         uuid = uuid4().hex
-        txid = get_random_value_hex(32)
-        jobs[uuid] = Job(txid, txid, None, None)
-        tx_job_map[txid] = [uuid]
+        justice_txid = get_random_value_hex(32)
+        dispute_txid = get_random_value_hex(32)
+        jobs[uuid] = Job(dispute_txid, justice_txid, None, None)
+        tx_job_map[justice_txid] = [uuid]
         selected_jobs.append(uuid)

     completed_jobs = [(job, 6) for job in selected_jobs]
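`completed_jobs` pairs every selected UUID with the literal 6, which reads as a confirmation count handed to the cleaner, although the consumer is not shown in this hunk. A tiny sketch of that structure (purely illustrative):

from uuid import uuid4

selected_jobs = [uuid4().hex for _ in range(4)]

# Each entry is (job_uuid, confirmations); 6 mirrors the literal used above.
completed_jobs = [(job, 6) for job in selected_jobs]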


@@ -2,11 +2,13 @@ import json
 import pytest
 import random
 from uuid import uuid4
+from shutil import rmtree
 from copy import deepcopy
 from threading import Thread
 from queue import Queue, Empty

 from pisa import c_logger
+from pisa.db_manager import DBManager
 from test.simulator.utils import sha256d
 from pisa.responder import Responder, Job
 from test.simulator.bitcoind_sim import TX
@@ -22,6 +24,16 @@ def responder(db_manager):
     return Responder(db_manager)


+@pytest.fixture()
+def temp_db_manager():
+    db_name = get_random_value_hex(8)
+    db_manager = DBManager(db_name)
+    yield db_manager
+
+    db_manager.db.close()
+    rmtree(db_name)
+
+
 def create_dummy_job_data(random_txid=False, justice_rawtx=None):
     # The following transaction data corresponds to a valid transaction. For some test it may be interesting to have
     # some valid data, but for others we may need multiple different justice_txids.
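The new `temp_db_manager` fixture gives every test its own throwaway on-disk database and removes it afterwards, rather than reusing and re-initialising a shared responder. A sketch of how a test consumes it, following the pattern the later hunks adopt (the test body here is illustrative):

def test_uses_isolated_db(temp_db_manager):
    # Each test builds a fresh Responder on top of a disposable DBManager,
    # so on-disk state cannot leak between tests.
    responder = Responder(temp_db_manager)
    responder.block_queue = Queue()
    assert isinstance(responder, Responder)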
@@ -274,12 +286,13 @@ def test_do_subscribe(responder):
         assert False


-def test_do_watch(responder):
-    # Reinitializing responder (but keeping the subscriber)
-    responder.jobs = dict()
-    responder.tx_job_map = dict()
-    responder.unconfirmed_txs = []
-    responder.missed_confirmations = dict()
+def test_do_watch(temp_db_manager):
+    responder = Responder(temp_db_manager)
+    responder.block_queue = Queue()
+
+    zmq_thread = Thread(target=responder.do_subscribe)
+    zmq_thread.daemon = True
+    zmq_thread.start()

     jobs = [create_dummy_job(justice_rawtx=TX.create_dummy_transaction()) for _ in range(20)]
@@ -329,12 +342,13 @@ def test_do_watch(responder):
     assert responder.asleep is True


-def test_check_confirmations(responder):
-    # Reinitializing responder (but keeping the subscriber)
-    responder.jobs = dict()
-    responder.tx_job_map = dict()
-    responder.unconfirmed_txs = []
-    responder.missed_confirmations = dict()
+def test_check_confirmations(temp_db_manager):
+    responder = Responder(temp_db_manager)
+    responder.block_queue = Queue()
+
+    zmq_thread = Thread(target=responder.do_subscribe)
+    zmq_thread.daemon = True
+    zmq_thread.start()

     # check_confirmations checks, given a list of transaction for a block, what of the known justice transaction have
     # been confirmed. To test this we need to create a list of transactions and the state of the responder
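The comment above summarises what `check_confirmations` does: given the transactions of a new block, it works out which tracked justice transactions are now confirmed and which missed another confirmation. A sketch of the state it operates on, using the attribute names visible earlier in this diff; the exact call signature is an assumption:

block_txs = [get_random_value_hex(32) for _ in range(10)]

# Half of the "unconfirmed" transactions show up in the block, half do not.
responder.unconfirmed_txs = block_txs[:5] + [get_random_value_hex(32) for _ in range(5)]

responder.check_confirmations(block_txs)

# Expected outcome: txs seen in the block leave unconfirmed_txs, while the
# rest accumulate one more missed confirmation.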


@@ -186,7 +186,7 @@ def test_do_watch(watcher):
     assert watcher.asleep is True


-def test_matches(watcher, txids, locator_uuid_map):
+def test_get_matches(watcher, txids, locator_uuid_map):
     watcher.locator_uuid_map = locator_uuid_map
     potential_matches = watcher.get_matches(txids)
@@ -194,7 +194,7 @@ def test_matches(watcher, txids, locator_uuid_map):
     assert locator_uuid_map.keys() == potential_matches.keys()


-def test_matches_random_data(watcher, locator_uuid_map):
+def test_get_matches_random_data(watcher, locator_uuid_map):
     # The likelihood of finding a potential match with random data should be negligible
     watcher.locator_uuid_map = locator_uuid_map
     txids = [get_random_value_hex(32) for _ in range(TEST_SET_SIZE)]
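The renamed tests rest on the comment above: random txids should essentially never hit a stored locator. A minimal statement of that property as it would be asserted (the empty-result assertion is an assumption about the part of the test not shown in this diff):

txids = [get_random_value_hex(32) for _ in range(TEST_SET_SIZE)]

watcher.locator_uuid_map = locator_uuid_map
potential_matches = watcher.get_matches(txids)

# With random 32-byte txids, a spurious locator match is astronomically
# unlikely, so no matches are expected at all.
assert len(potential_matches) == 0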