Renames dispute_delta to to_self_delay and justice tx to penalty tx

Uses the naming convention followed by LN implementations and BOLTs
Sergi Delgado Segura
2019-12-11 12:45:05 +01:00
parent 6a7cd4d3aa
commit df5dcbdfe9
16 changed files with 203 additions and 194 deletions

View File

@@ -12,7 +12,7 @@ class Appointment:
for the tower to decrypt and broadcast the penalty transaction.
start_time (int): The block height at which the tower is hired to start watching for breaches.
end_time (int): The block height at which the tower will stop watching for breaches.
- dispute_delta (int): The ``to_self_delay`` encoded in the ``csv`` of the ``htlc`` that this appointment is
+ to_self_delay (int): The ``to_self_delay`` encoded in the ``csv`` of the ``htlc`` that this appointment is
covering.
encrypted_blob (EncryptedBlob): An :mod:`EncryptedBlob <pisa.encrypted_blob>` object containing an encrypted
penalty transaction. The tower will decrypt it and broadcast the penalty transaction upon seeing a breach on
@@ -20,11 +20,11 @@ class Appointment:
"""
# DISCUSS: 35-appointment-checks
- def __init__(self, locator, start_time, end_time, dispute_delta, encrypted_blob):
+ def __init__(self, locator, start_time, end_time, to_self_delay, encrypted_blob):
self.locator = locator
self.start_time = start_time # ToDo: #4-standardize-appointment-fields
self.end_time = end_time # ToDo: #4-standardize-appointment-fields
- self.dispute_delta = dispute_delta
+ self.to_self_delay = to_self_delay
self.encrypted_blob = EncryptedBlob(encrypted_blob)
@classmethod
@@ -36,7 +36,7 @@ class Appointment:
Args:
appointment_data (dict): a dictionary containing the following keys:
- ``{locator, start_time, end_time, dispute_delta, encrypted_blob}``
+ ``{locator, start_time, end_time, to_self_delay, encrypted_blob}``
Returns:
``Appointment``: An appointment initialized using the provided data.
@@ -48,14 +48,14 @@ class Appointment:
locator = appointment_data.get("locator")
start_time = appointment_data.get("start_time") # ToDo: #4-standardize-appointment-fields
end_time = appointment_data.get("end_time") # ToDo: #4-standardize-appointment-fields
- dispute_delta = appointment_data.get("dispute_delta")
+ to_self_delay = appointment_data.get("to_self_delay")
encrypted_blob_data = appointment_data.get("encrypted_blob")
- if any(v is None for v in [locator, start_time, end_time, dispute_delta, encrypted_blob_data]):
+ if any(v is None for v in [locator, start_time, end_time, to_self_delay, encrypted_blob_data]):
raise ValueError("Wrong appointment data, some fields are missing")
else:
- appointment = cls(locator, start_time, end_time, dispute_delta, encrypted_blob_data)
+ appointment = cls(locator, start_time, end_time, to_self_delay, encrypted_blob_data)
return appointment
@@ -73,7 +73,7 @@ class Appointment:
"locator": self.locator,
"start_time": self.start_time,
"end_time": self.end_time,
"dispute_delta": self.dispute_delta,
"to_self_delay": self.to_self_delay,
"encrypted_blob": self.encrypted_blob.data,
}
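For reference, after this commit an appointment dictionary (as consumed by ``from_dict`` and produced by ``to_dict`` above) carries a ``to_self_delay`` key instead of ``dispute_delta``. A minimal sketch, with purely hypothetical placeholder values and an assumed import path:

    from pisa.appointment import Appointment  # import path assumed

    appointment_data = {
        "locator": "3c94...",            # hypothetical locator
        "start_time": 610000,            # hypothetical block heights
        "end_time": 610100,
        "to_self_delay": 144,            # was "dispute_delta" before this commit
        "encrypted_blob": "8a0c...",     # hypothetical encrypted penalty transaction
    }
    appointment = Appointment.from_dict(appointment_data)  # raises ValueError if a field is missing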

View File

@@ -69,11 +69,11 @@ class Builder:
job = Job.from_dict(data)
jobs[uuid] = job
- if job.justice_txid in tx_job_map:
- tx_job_map[job.justice_txid].append(uuid)
+ if job.penalty_txid in tx_job_map:
+ tx_job_map[job.penalty_txid].append(uuid)
else:
- tx_job_map[job.justice_txid] = [uuid]
+ tx_job_map[job.penalty_txid] = [uuid]
return jobs, tx_job_map

View File

@@ -88,17 +88,17 @@ class Cleaner:
confirmations=confirmations,
)
- justice_txid = jobs[uuid].justice_txid
+ penalty_txid = jobs[uuid].penalty_txid
locator = jobs[uuid].locator
jobs.pop(uuid)
- if len(tx_job_map[justice_txid]) == 1:
- tx_job_map.pop(justice_txid)
+ if len(tx_job_map[penalty_txid]) == 1:
+ tx_job_map.pop(penalty_txid)
- logger.info("No more jobs for justice transaction.", justice_txid=justice_txid)
+ logger.info("No more jobs for penalty transaction.", penalty_txid=penalty_txid)
else:
- tx_job_map[justice_txid].remove(uuid)
+ tx_job_map[penalty_txid].remove(uuid)
# Delete appointment from the db (both watcher's and responder's)
db_manager.delete_watcher_appointment(uuid)

View File

@@ -15,26 +15,26 @@ logger = Logger("Responder")
class Job:
- def __init__(self, locator, dispute_txid, justice_txid, justice_rawtx, appointment_end):
+ def __init__(self, locator, dispute_txid, penalty_txid, penalty_rawtx, appointment_end):
self.locator = locator
self.dispute_txid = dispute_txid
- self.justice_txid = justice_txid
- self.justice_rawtx = justice_rawtx
+ self.penalty_txid = penalty_txid
+ self.penalty_rawtx = penalty_rawtx
self.appointment_end = appointment_end
@classmethod
def from_dict(cls, job_data):
locator = job_data.get("locator")
dispute_txid = job_data.get("dispute_txid")
- justice_txid = job_data.get("justice_txid")
- justice_rawtx = job_data.get("justice_rawtx")
+ penalty_txid = job_data.get("penalty_txid")
+ penalty_rawtx = job_data.get("penalty_rawtx")
appointment_end = job_data.get("appointment_end")
- if any(v is None for v in [locator, dispute_txid, justice_txid, justice_rawtx, appointment_end]):
+ if any(v is None for v in [locator, dispute_txid, penalty_txid, penalty_rawtx, appointment_end]):
raise ValueError("Wrong job data, some fields are missing")
else:
- job = cls(locator, dispute_txid, justice_txid, justice_rawtx, appointment_end)
+ job = cls(locator, dispute_txid, penalty_txid, penalty_rawtx, appointment_end)
return job
@@ -42,8 +42,8 @@ class Job:
job = {
"locator": self.locator,
"dispute_txid": self.dispute_txid,
"justice_txid": self.justice_txid,
"justice_rawtx": self.justice_rawtx,
"penalty_txid": self.penalty_txid,
"penalty_rawtx": self.penalty_rawtx,
"appointment_end": self.appointment_end,
}
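Similarly, a responder Job now round-trips through dictionaries with ``penalty_txid`` / ``penalty_rawtx`` keys. A minimal sketch, with hypothetical placeholder values and an assumed import path:

    from pisa.responder import Job  # import path assumed

    job_data = {
        "locator": "3c94...",            # hypothetical locator
        "dispute_txid": "d0e1...",       # hypothetical breach (dispute) txid
        "penalty_txid": "f4a2...",       # was "justice_txid" before this commit
        "penalty_rawtx": "02000000...",  # was "justice_rawtx" before this commit
        "appointment_end": 610100,
    }
    job = Job.from_dict(job_data)        # raises ValueError if a field is missing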
@@ -78,20 +78,20 @@ class Responder:
return synchronized
def add_response(
- self, uuid, locator, dispute_txid, justice_txid, justice_rawtx, appointment_end, block_hash, retry=False
+ self, uuid, locator, dispute_txid, penalty_txid, penalty_rawtx, appointment_end, block_hash, retry=False
):
if self.asleep:
logger.info("Waking up")
carrier = Carrier()
- receipt = carrier.send_transaction(justice_rawtx, justice_txid)
+ receipt = carrier.send_transaction(penalty_rawtx, penalty_txid)
# do_watch can call add_response recursively if a broadcast transaction does not get confirmations
# retry holds that information. If retry is true the job already exists
if receipt.delivered:
if not retry:
self.create_job(
- uuid, locator, dispute_txid, justice_txid, justice_rawtx, appointment_end, receipt.confirmations
+ uuid, locator, dispute_txid, penalty_txid, penalty_rawtx, appointment_end, receipt.confirmations
)
else:
@@ -102,24 +102,24 @@ class Responder:
return receipt
- def create_job(self, uuid, locator, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations=0):
- job = Job(locator, dispute_txid, justice_txid, justice_rawtx, appointment_end)
+ def create_job(self, uuid, locator, dispute_txid, penalty_txid, penalty_rawtx, appointment_end, confirmations=0):
+ job = Job(locator, dispute_txid, penalty_txid, penalty_rawtx, appointment_end)
self.jobs[uuid] = job
- if justice_txid in self.tx_job_map:
- self.tx_job_map[justice_txid].append(uuid)
+ if penalty_txid in self.tx_job_map:
+ self.tx_job_map[penalty_txid].append(uuid)
else:
- self.tx_job_map[justice_txid] = [uuid]
+ self.tx_job_map[penalty_txid] = [uuid]
- # In the case we receive two jobs with the same justice txid we only add it to the unconfirmed txs list once
- if justice_txid not in self.unconfirmed_txs and confirmations == 0:
- self.unconfirmed_txs.append(justice_txid)
+ # In the case we receive two jobs with the same penalty txid we only add it to the unconfirmed txs list once
+ if penalty_txid not in self.unconfirmed_txs and confirmations == 0:
+ self.unconfirmed_txs.append(penalty_txid)
self.db_manager.store_responder_job(uuid, job.to_json())
logger.info(
"New job added.", dispute_txid=dispute_txid, justice_txid=justice_txid, appointment_end=appointment_end
"New job added.", dispute_txid=dispute_txid, penalty_txid=penalty_txid, appointment_end=appointment_end
)
if self.asleep:
@@ -215,8 +215,8 @@ class Responder:
completed_jobs = []
for uuid, job in self.jobs.items():
- if job.appointment_end <= height and job.justice_txid not in self.unconfirmed_txs:
- tx = Carrier.get_transaction(job.justice_txid)
+ if job.appointment_end <= height and job.penalty_txid not in self.unconfirmed_txs:
+ tx = Carrier.get_transaction(job.penalty_txid)
# FIXME: Should be improved with the librarian
if tx is not None:
@@ -243,8 +243,8 @@ class Responder:
job.locator,
uuid,
job.dispute_txid,
- job.justice_txid,
- job.justice_rawtx,
+ job.penalty_txid,
+ job.penalty_rawtx,
job.appointment_end,
block_hash,
retry=True,
@@ -252,7 +252,7 @@ class Responder:
logger.warning(
"Transaction has missed many confirmations. Rebroadcasting.",
- justice_txid=job.justice_txid,
+ penalty_txid=job.penalty_txid,
confirmations_missed=CONFIRMATIONS_BEFORE_RETRY,
)
@@ -269,22 +269,22 @@ class Responder:
dispute_tx = carrier.get_transaction(job.dispute_txid)
if dispute_tx is not None:
- # If the dispute is there, we check the justice
- justice_tx = carrier.get_transaction(job.justice_txid)
+ # If the dispute is there, we check the penalty
+ penalty_tx = carrier.get_transaction(job.penalty_txid)
- if justice_tx is not None:
- # If the justice exists we need to check if it's on the blockchain or not so we can update the
+ if penalty_tx is not None:
+ # If the penalty exists we need to check if it's on the blockchain or not so we can update the
# unconfirmed transactions list accordingly.
- if justice_tx.get("confirmations") is None:
- self.unconfirmed_txs.append(job.justice_txid)
+ if penalty_tx.get("confirmations") is None:
+ self.unconfirmed_txs.append(job.penalty_txid)
logger.info(
"Justice transaction back in mempool. Updating unconfirmed transactions.",
justice_txid=job.justice_txid,
"Penalty transaction back in mempool. Updating unconfirmed transactions.",
penalty_txid=job.penalty_txid,
)
else:
- # If the justice transaction is missing, we need to reset the job.
+ # If the penalty transaction is missing, we need to reset the job.
# DISCUSS: Adding job back, should we flag it as retried?
# FIXME: Whether we decide to increase the retried counter or not, the current counter should be
# maintained. There is no way of doing so with the current approach. Update if required
@@ -292,17 +292,17 @@ class Responder:
job.locator,
uuid,
job.dispute_txid,
- job.justice_txid,
- job.justice_rawtx,
+ job.penalty_txid,
+ job.penalty_rawtx,
job.appointment_end,
block_hash,
)
logger.warning("Justice transaction banished. Resetting the job", justice_tx=job.justice_txid)
logger.warning("Penalty transaction banished. Resetting the job", penalty_tx=job.penalty_txid)
else:
# ToDo: #24-properly-handle-reorgs
# FIXME: if the dispute is not on chain (either in mempool or not there at all), we need to call the
# reorg manager
logger.warning("Dispute and justice transaction missing. Calling the reorg manager.")
logger.warning("Dispute and penalty transaction missing. Calling the reorg manager.")
logger.error("Reorg manager not yet implemented.")

View File

@@ -13,7 +13,7 @@ FEED_PORT = None
# PISA
MAX_APPOINTMENTS = 100
EXPIRY_DELTA = 6
- MIN_DISPUTE_DELTA = 20
+ MIN_TO_SELF_DELAY = 20
SERVER_LOG_FILE = "pisa.log"
PISA_SECRET_KEY = "pisa_sk.der"
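MIN_TO_SELF_DELAY (previously MIN_DISPUTE_DELTA) is presumably the smallest to_self_delay the tower is willing to accept in an appointment; the check itself is not part of this diff, so the following is only a hypothetical sketch of how such a bound could be enforced:

    from pisa.conf import MIN_TO_SELF_DELAY  # import path assumed

    def check_to_self_delay(to_self_delay):
        # Hypothetical validation: reject appointments whose to_self_delay leaves the
        # tower too little time to get the penalty transaction confirmed.
        if to_self_delay < MIN_TO_SELF_DELAY:
            raise ValueError("to_self_delay too small: {} < {}".format(to_self_delay, MIN_TO_SELF_DELAY))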

View File

@@ -117,11 +117,11 @@ class Watcher:
filtered_matches = self.filter_valid_matches(self.get_matches(txids))
for uuid, filtered_match in filtered_matches.items():
- # Errors decrypting the Blob will result in a None justice_txid
+ # Errors decrypting the Blob will result in a None penalty_txid
if filtered_match["valid_match"] is True:
logger.info(
"Notifying responder and deleting appointment.",
- justice_txid=filtered_match["justice_txid"],
+ penalty_txid=filtered_match["penalty_txid"],
locator=filtered_match["locator"],
uuid=uuid,
)
@@ -130,8 +130,8 @@ class Watcher:
uuid,
filtered_match["locator"],
filtered_match["dispute_txid"],
filtered_match["justice_txid"],
filtered_match["justice_rawtx"],
filtered_match["penalty_txid"],
filtered_match["penalty_rawtx"],
self.appointments[uuid].end_time,
block_hash,
)
@@ -173,28 +173,28 @@ class Watcher:
for uuid in self.locator_uuid_map[locator]:
try:
- justice_rawtx = Cryptographer.decrypt(self.appointments[uuid].encrypted_blob, dispute_txid)
+ penalty_rawtx = Cryptographer.decrypt(self.appointments[uuid].encrypted_blob, dispute_txid)
except ValueError:
- justice_rawtx = None
+ penalty_rawtx = None
- justice_tx = BlockProcessor.decode_raw_transaction(justice_rawtx)
+ penalty_tx = BlockProcessor.decode_raw_transaction(penalty_rawtx)
- if justice_tx is not None:
- justice_txid = justice_tx.get("txid")
+ if penalty_tx is not None:
+ penalty_txid = penalty_tx.get("txid")
valid_match = True
logger.info("Match found for locator.", locator=locator, uuid=uuid, justice_txid=justice_txid)
logger.info("Match found for locator.", locator=locator, uuid=uuid, penalty_txid=penalty_txid)
else:
- justice_txid = None
+ penalty_txid = None
valid_match = False
filtered_matches[uuid] = {
"locator": locator,
"dispute_txid": dispute_txid,
"justice_txid": justice_txid,
"justice_rawtx": justice_rawtx,
"penalty_txid": penalty_txid,
"penalty_rawtx": penalty_rawtx,
"valid_match": valid_match,
}