Uses the dummy functions from conftest and extends Appointment with a triggered flag

Sergi Delgado Segura
2019-11-05 19:13:47 +00:00
parent 4bdadc1836
commit 6cb6aecc8c
2 changed files with 24 additions and 31 deletions
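
The conftest helpers this commit relies on are not part of the diff. Below is a minimal sketch of what they might look like; only the field names (taken from the inline helper removed further down) and the interface the tests use (job.justice_txid, job.to_dict(), appointment.locator, appointment.to_dict()/to_json()) come from the diff, while the import path and constructor signature of Job are assumptions:

# Hypothetical sketch of the new test/unit/conftest.py helpers, not the repository's code
from os import urandom
from pisa.responder import Job  # assumed import path and keyword-argument constructor

def get_random_value_hex(nbytes):
    # conftest's existing helper, roughly: nbytes of randomness as a hex string
    return urandom(nbytes).hex()

def generate_dummy_job():
    # Same random fields the removed inline helper built, wrapped in a Job object
    # so tests can use job.justice_txid and job.to_dict() directly.
    return Job(dispute_txid=get_random_value_hex(32),
               justice_txid=get_random_value_hex(32),
               justice_rawtx=get_random_value_hex(100),
               appointment_end=100)

generate_dummy_appointment would likewise return an Appointment object, now carrying the triggered flag named in the commit message, instead of the two-element tuple the old test_api helper returned; that is why the tests below switch from data["locator"] to appointment.locator and store appointment.to_dict().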

View File

@@ -1,37 +1,27 @@
 from uuid import uuid4
 from pisa.builder import Builder
-from test.unit.conftest import get_random_value_hex
-from test.unit.test_api import generate_dummy_appointment
+from test.unit.conftest import get_random_value_hex, generate_dummy_appointment, generate_dummy_job
-def generate_dummy_job():
-    dispute_txid = get_random_value_hex(32)
-    justice_txid = get_random_value_hex(32)
-    justice_rawtx = get_random_value_hex(100)
-    return {"dispute_txid": dispute_txid, "justice_txid": justice_txid, "justice_rawtx": justice_rawtx,
-            "appointment_end": 100}
-def test_build_appointments(run_bitcoind):
+def test_build_appointments():
     appointments_data = {}
     # Create some appointment data
     for i in range(10):
-        data, _ = generate_dummy_appointment()
+        appointment = generate_dummy_appointment()
         uuid = uuid4().hex
-        appointments_data[uuid] = data
+        appointments_data[uuid] = appointment.to_dict()
         # Add some additional appointments that share the same locator to test all the builder's cases
         if i % 2 == 0:
-            locator = data["locator"]
-            data, _ = generate_dummy_appointment()
+            locator = appointment.locator
+            appointment = generate_dummy_appointment()
             uuid = uuid4().hex
-            data["locator"] = locator
+            appointment.locator = locator
-            appointments_data[uuid] = data
+            appointments_data[uuid] = appointment.to_dict()
     # Use the builder to create the data structures
     appointments, locator_uuid_map = Builder.build_appointments(appointments_data)
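
The rest of test_build_appointments falls outside this hunk; based only on how the two returned structures are consumed elsewhere in this diff, their expected shape and the purpose of the duplicated locators can be sketched as follows (the assertions are illustrative, not the file's actual ones):

# appointments:     {uuid: Appointment rebuilt from appointments_data[uuid]}
# locator_uuid_map: {locator: [uuid, ...]}; the duplicated locators generated above
#                   should share one list entry, exercising the builder's
#                   "locator already present" branch.
for uuid, appointment in appointments.items():
    assert appointment.to_dict() == appointments_data[uuid]
    assert uuid in locator_uuid_map[appointment.locator]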
@@ -48,17 +38,17 @@ def test_build_jobs():
     # Create some jobs data
     for i in range(10):
-        data = generate_dummy_job()
+        job = generate_dummy_job()
-        jobs_data[uuid4().hex] = data
+        jobs_data[uuid4().hex] = job.to_dict()
         # Add some additional jobs that share the same locator to test all the builder's cases
         if i % 2 == 0:
-            justice_txid = data["justice_txid"]
-            data = generate_dummy_job()
-            data["justice_txid"] = justice_txid
+            justice_txid = job.justice_txid
+            job = generate_dummy_job()
+            job.justice_txid = justice_txid
-            jobs_data[uuid4().hex] = data
+            jobs_data[uuid4().hex] = job.to_dict()
     jobs, tx_job_map = Builder.build_jobs(jobs_data)
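
As with the locators above, the duplicated justice_txids force Builder.build_jobs to append to an existing tx_job_map entry instead of creating a new one. A sketch of the expected shape, inferred from the assertions in the next hunk (the final check is illustrative only):

# jobs:       {uuid: Job rebuilt from jobs_data[uuid]}
# tx_job_map: {justice_txid: [uuid, ...]}
shared = [uuids for uuids in tx_job_map.values() if len(uuids) > 1]
assert len(shared) > 0  # the i % 2 == 0 branch guarantees some shared justice_txids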
@@ -68,7 +58,6 @@ def test_build_jobs():
         job_dict = job.to_dict()
         # The locator is not part of the job_data found in the database (for now)
         job_dict.pop('locator')
         assert jobs_data[uuid] == job_dict
         assert uuid in tx_job_map[job.justice_txid]

View File

@@ -5,13 +5,13 @@ import shutil
 from uuid import uuid4
 from pisa.db_manager import DBManager
-from test.unit.conftest import get_random_value_hex
+from test.unit.conftest import get_random_value_hex, generate_dummy_appointment, generate_dummy_job
 from pisa.conf import WATCHER_LAST_BLOCK_KEY, RESPONDER_LAST_BLOCK_KEY
 @pytest.fixture(scope='module')
 def watcher_appointments():
-    return {uuid4().hex: get_random_value_hex(32) for _ in range(10)}
+    return {uuid4().hex: generate_dummy_appointment() for _ in range(10)}
 @pytest.fixture(scope='module')
@@ -148,15 +148,19 @@ def test_load_responder_jobs_empty(db_manager):
 def test_store_load_watcher_appointment(db_manager, watcher_appointments):
-    for key, value in watcher_appointments.items():
-        db_manager.store_watcher_appointment(key, json.dumps({'value': value}))
+    for uuid, appointment in watcher_appointments.items():
+        db_manager.store_watcher_appointment(uuid, appointment.to_json())
     db_watcher_appointments = db_manager.load_watcher_appointments()
-    values = [appointment["value"] for appointment in db_watcher_appointments.values()]
     # Check that the two appointment collections are equal by checking:
     # - Their size is equal
     # - Each element in one collection exists in the other
     assert watcher_appointments.keys() == db_watcher_appointments.keys()
-    assert set(watcher_appointments.values()) == set(values) and len(watcher_appointments) == len(values)
+    for uuid, appointment in watcher_appointments.items():
+        assert db_watcher_appointments[uuid] == appointment.to_dict()
 def test_store_load_appointment_jobs(db_manager, responder_jobs):
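
The new assertions implicitly rely on an appointment serialised with to_json() parsing back to its to_dict() form after the database round trip; that relationship is not shown in this diff, so the check below is only a sketch of the assumed behaviour:

import json
from test.unit.conftest import generate_dummy_appointment

# a dummy appointment as produced by the conftest helper used in the fixture above
appointment = generate_dummy_appointment()

stored = appointment.to_json()          # the string store_watcher_appointment persists
loaded = json.loads(stored)             # shape of what load_watcher_appointments returns per uuid
assert loaded == appointment.to_dict()  # the per-uuid equality asserted in the test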