Refactors the codebase to match the new naming from 793d563b8c053136dd5f936b0ef0cd88af215b06 and 3046eae38ee3f34857c96e6b9d43e645f7d2de2d

Sergi Delgado Segura
2019-12-12 17:18:44 +01:00
parent 7c1986dfc8
commit d881706eb3
12 changed files with 144 additions and 144 deletions

View File

@@ -121,11 +121,11 @@ def get_appointment():
appointment_data["status"] = "being_watched"
response.append(appointment_data)
job_data = watcher.db_manager.load_responder_job(uuid)
tracker_data = watcher.db_manager.load_responder_tracker(uuid)
if job_data is not None:
job_data["status"] = "dispute_responded"
response.append(job_data)
if tracker_data is not None:
tracker_data["status"] = "dispute_responded"
response.append(tracker_data)
else:
response.append({"locator": locator, "status": "not_found"})
@@ -144,7 +144,7 @@ def get_all_appointments():
Returns:
``dict``: A json formatted dictionary containing all the appointments held by the :mod:`Watcher <pisa.watcher>`
(``watcher_appointments``) and by the :mod:`Responder <pisa.responder>` (``responder_jobs``).
(``watcher_appointments``) and by the :mod:`Responder <pisa.responder>` (``responder_trackers``).
"""
@@ -153,9 +153,9 @@ def get_all_appointments():
if request.remote_addr in request.host or request.remote_addr == "127.0.0.1":
watcher_appointments = watcher.db_manager.load_watcher_appointments()
responder_jobs = watcher.db_manager.load_responder_jobs()
responder_trackers = watcher.db_manager.load_responder_trackers()
response = jsonify({"watcher_appointments": watcher_appointments, "responder_jobs": responder_jobs})
response = jsonify({"watcher_appointments": watcher_appointments, "responder_trackers": responder_trackers})
else:
abort(404)
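
For context, this rename changes the JSON payload returned by ``get_all_appointments``: the second top-level key is now ``responder_trackers`` instead of ``responder_jobs``. A minimal client-side sketch of consuming the new shape (the endpoint path, host and port are illustrative assumptions, not part of this commit):

import json
import requests  # assumed to be available on the client side

def fetch_all_appointments(host="127.0.0.1", port=9814):
    # Endpoint path and port are illustrative assumptions.
    r = requests.get("http://{}:{}/get_all_appointments".format(host, port))
    data = json.loads(r.content)
    # After this commit the responder data lives under "responder_trackers".
    return data.get("watcher_appointments", {}), data.get("responder_trackers", {})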

View File

@@ -1,6 +1,6 @@
from queue import Queue
from pisa.responder import Job
from pisa.responder import TransactionTracker
from pisa.appointment import Appointment
@@ -44,38 +44,38 @@ class Builder:
return appointments, locator_uuid_map
@staticmethod
def build_jobs(jobs_data):
def build_trackers(tracker_data):
"""
Builds a jobs dictionary (``uuid: Jobs``) and a tx_job_map (``penalty_txid: uuid``) given a dictionary of jobs
from the database.
Builds a tracker dictionary (``uuid: TransactionTracker``) and a tx_tracker_map (``penalty_txid: uuid``) given
a dictionary of trackers from the database.
Args:
jobs_data (dict): a dictionary of dictionaries representing all the :mod:`Responder <pisa.responder>` jobs
stored in the database. The structure is as follows:
tracker_data (dict): a dictionary of dictionaries representing all the :mod:`Responder <pisa.responder>`
trackers stored in the database. The structure is as follows:
``{uuid: {locator: str, dispute_txid: str, ...}, uuid: {locator:...}}``
Returns:
``tuple``: A tuple with two dictionaries. ``jobs`` containing the jobs information in
:class:`Job <pisa.responder>` objects and a ``tx_job_map`` containing the map of jobs
(``penalty_txid: uuid``).
``tuple``: A tuple with two dictionaries. ``trackers`` containing the trackers' information in
:class:`TransactionTracker <pisa.responder.TransactionTracker>` objects and a ``tx_tracker_map`` containing
the map of trackers (``penalty_txid: uuid``).
"""
jobs = {}
tx_job_map = {}
trackers = {}
tx_tracker_map = {}
for uuid, data in jobs_data.items():
job = Job.from_dict(data)
jobs[uuid] = job
for uuid, data in tracker_data.items():
tracker = TransactionTracker.from_dict(data)
trackers[uuid] = tracker
if job.penalty_txid in tx_job_map:
tx_job_map[job.penalty_txid].append(uuid)
if tracker.penalty_txid in tx_tracker_map:
tx_tracker_map[tracker.penalty_txid].append(uuid)
else:
tx_job_map[job.penalty_txid] = [uuid]
tx_tracker_map[tracker.penalty_txid] = [uuid]
return jobs, tx_job_map
return trackers, tx_tracker_map
@staticmethod
def build_block_queue(missed_blocks):
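
A minimal usage sketch of the renamed builder method, assuming the ``DBManager`` and ``Builder`` interfaces shown in this diff (the database path is an illustrative assumption):

from pisa.builder import Builder
from pisa.db_manager import DBManager

db_manager = DBManager("pisa_db")  # path is an illustrative assumption

# tracker_data: {uuid: {"locator": ..., "dispute_txid": ..., "penalty_txid": ..., ...}}
tracker_data = db_manager.load_responder_trackers()

# trackers maps uuid -> TransactionTracker; tx_tracker_map maps penalty_txid -> [uuid, ...]
trackers, tx_tracker_map = Builder.build_trackers(tracker_data)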

View File

@@ -74,7 +74,7 @@ class Carrier:
elif errno == RPC_VERIFY_ALREADY_IN_CHAIN:
logger.info("Transaction is already in the blockchain. Getting confirmation count", txid=txid)
# If the transaction is already in the chain, we get the number of confirmations and watch the job
# If the transaction is already in the chain, we get the number of confirmations and watch the tracker
# until the end of the appointment
tx_info = self.get_transaction(txid)
@@ -122,7 +122,7 @@ class Carrier:
tx_info = None
# While it's quite unlikely, the transaction that was already in the blockchain could have been
# reorged while we were querying bitcoind to get the confirmation count. In such a case we just
# restart the job
# restart the tracker
if e.error.get("code") == RPC_INVALID_ADDRESS_OR_KEY:
logger.info("Transaction not found in mempool nor blockchain", txid=txid)

View File

@@ -67,42 +67,42 @@ class Cleaner:
db_manager.store_watcher_appointment(uuid, appointment.to_json(triggered=True))
@staticmethod
def delete_completed_jobs(completed_jobs, height, jobs, tx_job_map, db_manager):
def delete_completed_trackers(completed_trackers, height, trackers, tx_tracker_map, db_manager):
"""
Deletes a completed job both from memory (:mod:`Responder <pisa.responder>`) and disk (from the
Deletes a completed tracker both from memory (:mod:`Responder <pisa.responder>`) and disk (from the
:mod:`Responder <pisa.responder>` and :mod:`Watcher <pisa.watcher>` databases).
Args:
jobs (dict): a dictionary containing all the :mod:`Responder <pisa.responder>` jobs.
tx_job_map (dict): a ``penalty_txid:uuid`` map for the :mod:`Responder <pisa.responder>` jobs.
completed_jobs (list): a list of completed jobs to be deleted.
height (int): the block height at which the jobs were completed.
trackers (dict): a dictionary containing all the :mod:`Responder <pisa.responder>` trackers.
tx_tracker_map (dict): a ``penalty_txid:uuid`` map for the :mod:`Responder <pisa.responder>` trackers.
completed_trackers (list): a list of completed trackers to be deleted.
height (int): the block height at which the trackers were completed.
db_manager (DBManager): a :mod:`DBManager <pisa.db_manager>` instance to interact with the database.
"""
for uuid, confirmations in completed_jobs:
for uuid, confirmations in completed_trackers:
logger.info(
"Job completed. Appointment ended after reaching enough confirmations.",
"Appointment completed. Appointment ended after reaching enough confirmations.",
uuid=uuid,
height=height,
confirmations=confirmations,
)
penalty_txid = jobs[uuid].penalty_txid
locator = jobs[uuid].locator
jobs.pop(uuid)
penalty_txid = trackers[uuid].penalty_txid
locator = trackers[uuid].locator
trackers.pop(uuid)
if len(tx_job_map[penalty_txid]) == 1:
tx_job_map.pop(penalty_txid)
if len(tx_tracker_map[penalty_txid]) == 1:
tx_tracker_map.pop(penalty_txid)
logger.info("No more jobs for penalty transaction.", penalty_txid=penalty_txid)
logger.info("No more trackers for penalty transaction.", penalty_txid=penalty_txid)
else:
tx_job_map[penalty_txid].remove(uuid)
tx_tracker_map[penalty_txid].remove(uuid)
# Delete appointment from the db (both watcher's and responder's)
db_manager.delete_watcher_appointment(uuid)
db_manager.delete_responder_job(uuid)
db_manager.delete_responder_tracker(uuid)
# Update / delete the locator map
locator_map = db_manager.load_locator_map(locator)
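
A minimal sketch of how the renamed entry point is invoked, assuming a ``Responder`` instance that exposes the ``trackers`` and ``tx_tracker_map`` attributes set up in ``pisad`` further down (the helper name is hypothetical):

from pisa.cleaner import Cleaner

def prune_completed_trackers(responder, db_manager, completed_trackers, height):
    # completed_trackers is a list of (uuid, confirmations) pairs, matching the
    # docstring above; the in-memory state lives on the Responder instance.
    Cleaner.delete_completed_trackers(
        completed_trackers, height, responder.trackers, responder.tx_tracker_map, db_manager
    )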

View File

@@ -20,7 +20,7 @@ class DBManager:
The database is split in five prefixes:
- ``WATCHER_PREFIX``, defined as ``b'w``, is used to store :mod:`Watcher <pisa.watcher>` appointments.
- ``RESPONDER_PREFIX``, defined as ``b'r``, is used to store :mod:`Responder <pisa.responder>` jobs.
- ``RESPONDER_PREFIX``, defined as ``b'r``, is used to store :mod:`Responder <pisa.responder>` trackers.
- ``WATCHER_LAST_BLOCK_KEY``, defined as ``b'bw``, is used to store the last block hash known by the :mod:`Watcher <pisa.watcher>`.
- ``RESPONDER_LAST_BLOCK_KEY``, defined as ``b'br``, is used to store the last block hash known by the :mod:`Responder <pisa.responder>`.
- ``LOCATOR_MAP_PREFIX``, defined as ``b'm``, is used to store the ``locator:uuid`` maps.
@@ -51,7 +51,7 @@ class DBManager:
prefix (str): the prefix of the data to load.
Returns:
(``dict``): A dictionary containing the requested data (appointments or jobs) indexed by ``uuid``.
(``dict``): A dictionary containing the requested data (appointments or trackers) indexed by ``uuid``.
Returns an empty dictionary if no data is found.
"""
@@ -108,7 +108,7 @@ class DBManager:
key (str): the key that identifies the entry to be loaded.
Returns:
(``dict`` or ``None``): A dictionary containing the requested data (an appointment or a job).
(``dict`` or ``None``): A dictionary containing the requested data (an appointment or a tracker).
Returns ``None`` if the entry is not found.
"""
@@ -145,12 +145,12 @@ class DBManager:
return self.load_entry(WATCHER_PREFIX + key)
def load_responder_job(self, key):
def load_responder_tracker(self, key):
"""
Loads a job from the database using ``RESPONDER_PREFIX`` as a prefix to the given ``key``.
Loads a tracker from the database using ``RESPONDER_PREFIX`` as a prefix to the given ``key``.
Returns:
(``dict``): A dictionary containing the job data if the ``key`` is found.
(``dict``): A dictionary containing the tracker data if the ``key`` is found.
Returns ``None`` otherwise.
"""
@@ -179,12 +179,12 @@ class DBManager:
return appointments
def load_responder_jobs(self):
def load_responder_trackers(self):
"""
Loads all the jobs from the database (all entries with the ``RESPONDER_PREFIX`` prefix).
Loads all the trackers from the database (all entries with the ``RESPONDER_PREFIX`` prefix).
Returns:
(``dict``): A dictionary with all the jobs stored in the database. An empty dictionary if there are
(``dict``): A dictionary with all the trackers stored in the database. An empty dictionary if there are
none.
"""
@@ -198,12 +198,12 @@ class DBManager:
self.create_entry(uuid, appointment, prefix=WATCHER_PREFIX)
logger.info("Adding appointment to Watchers's db", uuid=uuid)
def store_responder_job(self, uuid, job):
def store_responder_tracker(self, uuid, tracker):
"""
Stores a job in the database using the ``RESPONDER_PREFIX`` prefix.
Stores a tracker in the database using the ``RESPONDER_PREFIX`` prefix.
"""
self.create_entry(uuid, job, prefix=RESPONDER_PREFIX)
self.create_entry(uuid, tracker, prefix=RESPONDER_PREFIX)
logger.info("Adding appointment to Responder's db", uuid=uuid)
def load_locator_map(self, locator):
@@ -280,12 +280,12 @@ class DBManager:
self.delete_entry(uuid, prefix=WATCHER_PREFIX)
logger.info("Deleting appointment from Watcher's db", uuid=uuid)
def delete_responder_job(self, uuid):
def delete_responder_tracker(self, uuid):
"""
Deletes a job from the database.
Deletes a tracker from the database.
Args:
uuid (str): a 16-byte hex-encoded string identifying the job to be deleted.
uuid (str): a 16-byte hex-encoded string identifying the tracker to be deleted.
"""
self.delete_entry(uuid, prefix=RESPONDER_PREFIX)
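
A short round-trip sketch of the prefix scheme with the renamed tracker methods (the uuid, payload and database path are illustrative; the JSON-in / dict-out behaviour follows the docstrings above):

import json
from pisa.db_manager import DBManager

db_manager = DBManager("pisa_db")  # path is an illustrative assumption

uuid = "d1b833bf16ee4b9a9c4c8a7d3c1ffb12"  # 16-byte hex-encoded identifier
tracker_json = json.dumps({"locator": "00" * 16, "penalty_txid": "ff" * 32})

# Stored under RESPONDER_PREFIX (b'r') + uuid.
db_manager.store_responder_tracker(uuid, tracker_json)

# Loaded back with the same prefix; per the docstring this returns a dict (or None).
tracker_data = db_manager.load_responder_tracker(uuid)

# Deletion also goes through RESPONDER_PREFIX.
db_manager.delete_responder_tracker(uuid)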

View File

@@ -47,11 +47,11 @@ if __name__ == "__main__":
db_manager = DBManager(DB_PATH)
watcher_appointments_data = db_manager.load_watcher_appointments()
responder_jobs_data = db_manager.load_responder_jobs()
responder_trackers_data = db_manager.load_responder_trackers()
watcher = Watcher(db_manager)
if len(watcher_appointments_data) == 0 and len(responder_jobs_data) == 0:
if len(watcher_appointments_data) == 0 and len(responder_trackers_data) == 0:
logger.info("Fresh bootstrap")
else:
@@ -69,7 +69,7 @@ if __name__ == "__main__":
)
responder = Responder(db_manager)
responder.jobs, responder.tx_job_map = Builder.build_jobs(responder_jobs_data)
responder.trackers, responder.tx_tracker_map = Builder.build_trackers(responder_trackers_data)
responder.block_queue = Builder.build_block_queue(missed_blocks_responder)
watcher.responder = responder
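
Condensed, the bootstrap path above is: load both databases, start fresh if both are empty, otherwise rebuild the in-memory state from disk. A short sketch under the same assumptions (``DB_PATH`` replaced by a placeholder, missed-block handling elided):

from pisa.builder import Builder
from pisa.db_manager import DBManager
from pisa.responder import Responder
from pisa.watcher import Watcher

db_manager = DBManager("pisa_db")  # stand-in for DB_PATH

watcher_appointments_data = db_manager.load_watcher_appointments()
responder_trackers_data = db_manager.load_responder_trackers()

watcher = Watcher(db_manager)

if len(watcher_appointments_data) == 0 and len(responder_trackers_data) == 0:
    pass  # fresh bootstrap, nothing to rebuild
else:
    # Rebuild the Responder's trackers and tx_tracker_map from the persisted data.
    responder = Responder(db_manager)
    responder.trackers, responder.tx_tracker_map = Builder.build_trackers(responder_trackers_data)
    watcher.responder = responder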

View File

@@ -9,7 +9,7 @@ from pisa.logger import Logger
from pisa.cleaner import Cleaner
from pisa.responder import Responder
from pisa.block_processor import BlockProcessor
from pisa.utils.zmq_subscriber import ZMQHandler
from pisa.utils.zmq_subscriber import ZMQSubscriber
from pisa.conf import EXPIRY_DELTA, MAX_APPOINTMENTS, PISA_SECRET_KEY
logger = Logger("Watcher")
@@ -89,7 +89,7 @@ class Watcher:
return appointment_added, signature
def do_subscribe(self):
self.zmq_subscriber = ZMQHandler(parent="Watcher")
self.zmq_subscriber = ZMQSubscriber(parent="Watcher")
self.zmq_subscriber.handle(self.block_queue)
def do_watch(self):
@@ -126,7 +126,7 @@ class Watcher:
uuid=uuid,
)
self.responder.add_response(
self.responder.handle_breach(
uuid,
filtered_match["locator"],
filtered_match["dispute_txid"],

View File

@@ -11,7 +11,7 @@ from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives import serialization
from apps.cli.blob import Blob
from pisa.responder import Job
from pisa.responder import TransactionTracker
from pisa.watcher import Watcher
from pisa.tools import bitcoin_cli
from pisa.db_manager import DBManager
@@ -126,13 +126,13 @@ def generate_dummy_appointment(real_height=True, start_time_offset=5, end_time_o
return Appointment.from_dict(appointment_data["appointment"]), dispute_tx
def generate_dummy_job():
def generate_dummy_tracker():
dispute_txid = get_random_value_hex(32)
penalty_txid = get_random_value_hex(32)
penalty_rawtx = get_random_value_hex(100)
locator = dispute_txid[:LOCATOR_LEN_HEX]
job_data = dict(
tracker_data = dict(
locator=locator,
dispute_txid=dispute_txid,
penalty_txid=penalty_txid,
@@ -140,4 +140,4 @@ def generate_dummy_job():
appointment_end=100,
)
return Job.from_dict(job_data)
return TransactionTracker.from_dict(tracker_data)

View File

@@ -109,7 +109,7 @@ def test_get_all_appointments_watcher():
local_locators = [appointment["locator"] for appointment in appointments]
assert set(watcher_locators) == set(local_locators)
assert len(received_appointments["responder_jobs"]) == 0
assert len(received_appointments["responder_trackers"]) == 0
def test_get_all_appointments_responder():
@@ -127,10 +127,10 @@ def test_get_all_appointments_responder():
received_appointments = json.loads(r.content)
# Make sure there is no pending locator in the watcher
responder_jobs = [v["locator"] for k, v in received_appointments["responder_jobs"].items()]
responder_trackers = [v["locator"] for k, v in received_appointments["responder_trackers"].items()]
local_locators = [appointment["locator"] for appointment in appointments]
assert set(responder_jobs) == set(local_locators)
assert set(responder_trackers) == set(local_locators)
assert len(received_appointments["watcher_appointments"]) == 0

View File

@@ -1,7 +1,7 @@
from uuid import uuid4
from pisa.builder import Builder
from test.unit.conftest import get_random_value_hex, generate_dummy_appointment, generate_dummy_job
from test.unit.conftest import get_random_value_hex, generate_dummy_appointment, generate_dummy_tracker
def test_build_appointments():
@@ -33,33 +33,33 @@ def test_build_appointments():
assert uuid in locator_uuid_map[appointment.locator]
def test_build_jobs():
jobs_data = {}
def test_build_trackers():
trackers_data = {}
# Create some jobs data
# Create some trackers data
for i in range(10):
job = generate_dummy_job()
tracker = generate_dummy_tracker()
jobs_data[uuid4().hex] = job.to_dict()
trackers_data[uuid4().hex] = tracker.to_dict()
# Add some additional jobs that share the same locator to test all the builder's cases
# Add some additional trackers that share the same locator to test all the builder's cases
if i % 2 == 0:
penalty_txid = job.penalty_txid
job = generate_dummy_job()
job.penalty_txid = penalty_txid
penalty_txid = tracker.penalty_txid
tracker = generate_dummy_tracker()
tracker.penalty_txid = penalty_txid
jobs_data[uuid4().hex] = job.to_dict()
trackers_data[uuid4().hex] = tracker.to_dict()
jobs, tx_job_map = Builder.build_jobs(jobs_data)
trackers, tx_tracker_map = Builder.build_trackers(trackers_data)
# Check that the built jobs match the data
for uuid, job in jobs.items():
assert uuid in jobs_data.keys()
job_dict = job.to_dict()
# Check that the built trackers match the data
for uuid, tracker in trackers.items():
assert uuid in trackers_data.keys()
tracker_dict = tracker.to_dict()
# The locator is not part of the job_data found in the database (for now)
assert jobs_data[uuid] == job_dict
assert uuid in tx_job_map[job.penalty_txid]
# The locator is not part of the tracker_data found in the database (for now)
assert trackers_data[uuid] == tracker_dict
assert uuid in tx_tracker_map[tracker.penalty_txid]
def test_build_block_queue():

View File

@@ -2,7 +2,7 @@ import random
from uuid import uuid4
from pisa import c_logger
from pisa.responder import Job
from pisa.responder import TransactionTracker
from pisa.cleaner import Cleaner
from pisa.appointment import Appointment
from pisa.db_manager import WATCHER_PREFIX
@@ -48,11 +48,11 @@ def set_up_appointments(db_manager, total_appointments):
return appointments, locator_uuid_map
def set_up_jobs(db_manager, total_jobs):
jobs = dict()
tx_job_map = dict()
def set_up_trackers(db_manager, total_trackers):
trackers = dict()
tx_tracker_map = dict()
for i in range(total_jobs):
for i in range(total_trackers):
uuid = uuid4().hex
# We use the same txid for penalty and dispute here, it shouldn't matter
@@ -61,24 +61,24 @@ def set_up_jobs(db_manager, total_jobs):
locator = dispute_txid[:LOCATOR_LEN_HEX]
# Assign both penalty_txid and dispute_txid the same id (it shouldn't matter)
job = Job(locator, dispute_txid, penalty_txid, None, None)
jobs[uuid] = job
tx_job_map[penalty_txid] = [uuid]
tracker = TransactionTracker(locator, dispute_txid, penalty_txid, None, None)
trackers[uuid] = tracker
tx_tracker_map[penalty_txid] = [uuid]
db_manager.store_responder_job(uuid, job.to_json())
db_manager.store_update_locator_map(job.locator, uuid)
db_manager.store_responder_tracker(uuid, tracker.to_json())
db_manager.store_update_locator_map(tracker.locator, uuid)
# Each penalty_txid can have more than one uuid assigned to it.
if i % 2:
uuid = uuid4().hex
jobs[uuid] = job
tx_job_map[penalty_txid].append(uuid)
trackers[uuid] = tracker
tx_tracker_map[penalty_txid].append(uuid)
db_manager.store_responder_job(uuid, job.to_json())
db_manager.store_update_locator_map(job.locator, uuid)
db_manager.store_responder_tracker(uuid, tracker.to_json())
db_manager.store_update_locator_map(tracker.locator, uuid)
return jobs, tx_job_map
return trackers, tx_tracker_map
def test_delete_expired_appointment(db_manager):
@@ -107,38 +107,38 @@ def test_delete_completed_appointments(db_manager):
assert db_appointments[uuid]["triggered"] is True
def test_delete_completed_jobs_db_match(db_manager):
def test_delete_completed_trackers_db_match(db_manager):
height = 0
for _ in range(ITERATIONS):
jobs, tx_job_map = set_up_jobs(db_manager, MAX_ITEMS)
selected_jobs = random.sample(list(jobs.keys()), k=ITEMS)
trackers, tx_tracker_map = set_up_trackers(db_manager, MAX_ITEMS)
selected_trackers = random.sample(list(trackers.keys()), k=ITEMS)
completed_jobs = [(job, 6) for job in selected_jobs]
completed_trackers = [(tracker, 6) for tracker in selected_trackers]
Cleaner.delete_completed_jobs(completed_jobs, height, jobs, tx_job_map, db_manager)
Cleaner.delete_completed_trackers(completed_trackers, height, trackers, tx_tracker_map, db_manager)
assert not set(completed_jobs).issubset(jobs.keys())
assert not set(completed_trackers).issubset(trackers.keys())
def test_delete_completed_jobs_no_db_match(db_manager):
def test_delete_completed_trackers_no_db_match(db_manager):
height = 0
for _ in range(ITERATIONS):
jobs, tx_job_map = set_up_jobs(db_manager, MAX_ITEMS)
selected_jobs = random.sample(list(jobs.keys()), k=ITEMS)
trackers, tx_tracker_map = set_up_trackers(db_manager, MAX_ITEMS)
selected_trackers = random.sample(list(trackers.keys()), k=ITEMS)
# Let's change some uuid's by creating new jobs that are not included in the db and share a penalty_txid with
# another job that is stored in the db.
for uuid in selected_jobs[: ITEMS // 2]:
penalty_txid = jobs[uuid].penalty_txid
# Let's change some uuid's by creating new trackers that are not included in the db and share a penalty_txid
# with another tracker that is stored in the db.
for uuid in selected_trackers[: ITEMS // 2]:
penalty_txid = trackers[uuid].penalty_txid
dispute_txid = get_random_value_hex(32)
locator = dispute_txid[:LOCATOR_LEN_HEX]
new_uuid = uuid4().hex
jobs[new_uuid] = Job(locator, dispute_txid, penalty_txid, None, None)
tx_job_map[penalty_txid].append(new_uuid)
selected_jobs.append(new_uuid)
trackers[new_uuid] = TransactionTracker(locator, dispute_txid, penalty_txid, None, None)
tx_tracker_map[penalty_txid].append(new_uuid)
selected_trackers.append(new_uuid)
# Let's add some random data
for i in range(ITEMS // 2):
@@ -147,12 +147,12 @@ def test_delete_completed_jobs_no_db_match(db_manager):
dispute_txid = get_random_value_hex(32)
locator = dispute_txid[:LOCATOR_LEN_HEX]
jobs[uuid] = Job(locator, dispute_txid, penalty_txid, None, None)
tx_job_map[penalty_txid] = [uuid]
selected_jobs.append(uuid)
trackers[uuid] = TransactionTracker(locator, dispute_txid, penalty_txid, None, None)
tx_tracker_map[penalty_txid] = [uuid]
selected_trackers.append(uuid)
completed_jobs = [(job, 6) for job in selected_jobs]
completed_trackers = [(tracker, 6) for tracker in selected_trackers]
# We should be able to delete the correct ones and not fail in the others
Cleaner.delete_completed_jobs(completed_jobs, height, jobs, tx_job_map, db_manager)
assert not set(completed_jobs).issubset(jobs.keys())
Cleaner.delete_completed_trackers(completed_trackers, height, trackers, tx_tracker_map, db_manager)
assert not set(completed_trackers).issubset(trackers.keys())

View File

@@ -18,7 +18,7 @@ def watcher_appointments():
@pytest.fixture(scope="module")
def responder_jobs():
def responder_trackers():
return {get_random_value_hex(16): get_random_value_hex(32) for _ in range(10)}
@@ -158,8 +158,8 @@ def test_load_watcher_appointments_empty(db_manager):
assert len(db_manager.load_watcher_appointments()) == 0
def test_load_responder_jobs_empty(db_manager):
assert len(db_manager.load_responder_jobs()) == 0
def test_load_responder_trackers_empty(db_manager):
assert len(db_manager.load_responder_trackers()) == 0
def test_load_locator_map_empty(db_manager):
@@ -228,16 +228,16 @@ def test_store_load_triggered_appointment(db_manager):
assert uuid in db_manager.load_watcher_appointments(include_triggered=True)
def test_store_load_responder_jobs(db_manager, responder_jobs):
for key, value in responder_jobs.items():
db_manager.store_responder_job(key, json.dumps({"value": value}))
def test_store_load_responder_trackers(db_manager, responder_trackers):
for key, value in responder_trackers.items():
db_manager.store_responder_tracker(key, json.dumps({"value": value}))
db_responder_jobs = db_manager.load_responder_jobs()
db_responder_trackers = db_manager.load_responder_trackers()
values = [job["value"] for job in db_responder_jobs.values()]
values = [tracker["value"] for tracker in db_responder_trackers.values()]
assert responder_jobs.keys() == db_responder_jobs.keys()
assert set(responder_jobs.values()) == set(values) and len(responder_jobs) == len(values)
assert responder_trackers.keys() == db_responder_trackers.keys()
assert set(responder_trackers.values()) == set(values) and len(responder_trackers) == len(values)
def test_delete_watcher_appointment(db_manager, watcher_appointments):
@@ -252,16 +252,16 @@ def test_delete_watcher_appointment(db_manager, watcher_appointments):
assert len(db_watcher_appointments) == 0
def test_delete_responder_job(db_manager, responder_jobs):
def test_delete_responder_tracker(db_manager, responder_trackers):
# Same for the responder
db_responder_jobs = db_manager.load_responder_jobs()
assert len(db_responder_jobs) != 0
db_responder_trackers = db_manager.load_responder_trackers()
assert len(db_responder_trackers) != 0
for key in responder_jobs.keys():
db_manager.delete_responder_job(key)
for key in responder_trackers.keys():
db_manager.delete_responder_tracker(key)
db_responder_jobs = db_manager.load_responder_jobs()
assert len(db_responder_jobs) == 0
db_responder_trackers = db_manager.load_responder_trackers()
assert len(db_responder_trackers) == 0
def test_store_load_last_block_hash_watcher(db_manager):