Merge branch 'testing' into 13-appointment-signature

This commit is contained in:
Salvatore Ingala
2019-10-23 09:19:44 +08:00
16 changed files with 442 additions and 61 deletions

View File

@@ -93,7 +93,7 @@ def get_appointment():
response.append(job_data)
if not response:
response.append({"locator": locator, "status": "not found"})
response.append({"locator": locator, "status": "not_found"})
response = jsonify(response)
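
For context, a minimal client-side sketch of how the machine-friendly "not_found" status can be branched on. The host, port and endpoint name below are placeholders inferred from this hunk, not taken from the repository.

    import requests

    # Hypothetical client check; only the "not_found" status value comes from this change.
    res = requests.get("http://localhost:9814/get_appointment", params={"locator": "00" * 32})
    for entry in res.json():
        if entry.get("status") == "not_found":
            print("no appointment for locator", entry.get("locator"))
        else:
            print("appointment data:", entry)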

View File

@@ -65,6 +65,7 @@ class BlockProcessor:
return potential_matches
@staticmethod
# NOTCOVERED
def get_matches(potential_matches, locator_uuid_map, appointments):
matches = []
@@ -89,6 +90,7 @@ class BlockProcessor:
# DISCUSS: This method comes from the Responder and seems like it could go back there.
@staticmethod
# NOTCOVERED
def check_confirmations(txs, unconfirmed_txs, tx_job_map, missed_confirmations):
for tx in txs:
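
As an aside, a self-contained sketch of the bookkeeping a check_confirmations with this signature typically performs, written as a plain function; it illustrates the idea and is not the method's actual body.

    def check_confirmations(txs, unconfirmed_txs, tx_job_map, missed_confirmations):
        # txs: txids included in the newly received block.
        # unconfirmed_txs: justice txids still waiting for their first confirmation.
        # missed_confirmations: txid -> number of blocks seen without the tx confirming.
        for txid in list(unconfirmed_txs):
            if txid in txs:
                # First confirmation seen: the tx is no longer unconfirmed.
                unconfirmed_txs.remove(txid)
            elif txid in tx_job_map:
                # Still tracked but absent from this block: one more confirmation missed.
                missed_confirmations[txid] = missed_confirmations.get(txid, 0) + 1

        return unconfirmed_txs, missed_confirmations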

View File

@@ -2,7 +2,7 @@ from pisa.rpc_errors import *
from pisa.logger import Logger
from pisa.tools import bitcoin_cli
from pisa.utils.auth_proxy import JSONRPCException
from pisa.errors import UNKNOWN_JSON_RPC_EXCEPTION
from pisa.errors import UNKNOWN_JSON_RPC_EXCEPTION, RPC_TX_REORGED_AFTER_BROADCAST
logger = Logger("Carrier")
@@ -17,6 +17,7 @@ class Receipt:
class Carrier:
# NOTCOVERED
def send_transaction(self, rawtx, txid):
try:
logger.info("Pushing transaction to the network", txid=txid, rawtx=rawtx)
@@ -50,8 +51,9 @@ class Carrier:
else:
# There's a really unlikely edge case where a transaction can be reorged between receiving the
# notification and querying the data. In such a case we just resend
self.send_transaction(rawtx, txid)
# notification and querying the data. Notice that this also implies the tx being kicked out of the
# mempool, which again is really unlikely.
receipt = Receipt(delivered=False, reason=RPC_TX_REORGED_AFTER_BROADCAST)
elif errno == RPC_DESERIALIZATION_ERROR:
# Adding this here just for completeness. We should never end up here. The Carrier only sends txs
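
With the recursion gone, resending becomes an explicit caller decision based on the receipt. A hedged sketch of that pattern; the delivered and reason values appear in this diff as Receipt constructor arguments, and the helper assumes Receipt exposes them as attributes:

    from pisa.errors import RPC_TX_REORGED_AFTER_BROADCAST

    def resend_if_reorged(carrier, receipt, rawtx, txid):
        # Illustrative only: if the tx was reorged (and hence dropped from the mempool)
        # right after broadcast, the caller can choose to push it again.
        if not receipt.delivered and receipt.reason == RPC_TX_REORGED_AFTER_BROADCAST:
            return carrier.send_transaction(rawtx, txid)

        return receipt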

View File

@@ -9,6 +9,9 @@ APPOINTMENT_WRONG_FIELD = -7
APPOINTMENT_CIPHER_NOT_SUPPORTED = -8
APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED = -9
# Custom RPC errors
RPC_TX_REORGED_AFTER_BROADCAST = -98
# UNHANDLED
UNKNOWN_JSON_RPC_EXCEPTION = -99
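
For logging purposes, these negative codes can be mapped back to their names; a small illustrative helper, assuming the constants live in a pisa.errors module as the imports elsewhere in this commit suggest:

    from pisa import errors

    # Illustrative reverse lookup: {code: NAME} built from the module's integer constants.
    ERROR_NAMES = {value: name for name, value in vars(errors).items()
                   if name.isupper() and isinstance(value, int)}

    def describe_error(code):
        return ERROR_NAMES.get(code, "UNKNOWN_ERROR ({})".format(code))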

View File

@@ -18,14 +18,12 @@ logger = Logger("Responder")
class Job:
def __init__(self, dispute_txid, justice_txid, justice_rawtx, appointment_end, retry_counter=0):
def __init__(self, dispute_txid, justice_txid, justice_rawtx, appointment_end):
self.dispute_txid = dispute_txid
self.justice_txid = justice_txid
self.justice_rawtx = justice_rawtx
self.appointment_end = appointment_end
self.retry_counter = retry_counter
# FIXME: locator is here so we can give info about jobs for now. It can either be passed from the watcher
# or fetched directly from the DB
self.locator = sha256(unhexlify(dispute_txid)).hexdigest()
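
The locator derived here is simply the SHA256 of the raw dispute txid bytes; a standalone version using only the standard library:

    from hashlib import sha256
    from binascii import unhexlify

    # Placeholder all-zero txid; in practice this is the hex id of the dispute transaction.
    dispute_txid = "00" * 32
    locator = sha256(unhexlify(dispute_txid)).hexdigest()
    print(locator)
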
@@ -56,11 +54,11 @@ class Responder:
carrier = Carrier()
receipt = carrier.send_transaction(justice_rawtx, justice_txid)
# do_watch can call add_response recursively if a broadcast transaction does not get confirmations
# retry holds that information. If retry is true the job already exists
if receipt.delivered:
# do_watch can call add_response recursively if a broadcast transaction does not get confirmations
# retry holds such information.
self.create_job(uuid, dispute_txid, justice_txid, justice_rawtx, appointment_end, retry=retry,
confirmations=receipt.confirmations)
if not retry:
self.create_job(uuid, dispute_txid, justice_txid, justice_rawtx, appointment_end, receipt.confirmations)
else:
# TODO: Add the missing reasons (e.g. RPC_VERIFY_REJECTED)
@@ -68,25 +66,17 @@ class Responder:
return receipt
def create_job(self, uuid, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations=0,
retry=False):
def create_job(self, uuid, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations=0):
self.jobs[uuid] = Job(dispute_txid, justice_txid, justice_rawtx, appointment_end)
# ToDo: #23-define-behaviour-approaching-end
if retry:
self.jobs[uuid].retry_counter += 1
self.missed_confirmations[justice_txid] = 0
if justice_txid in self.tx_job_map:
self.tx_job_map[justice_txid].append(uuid)
else:
self.jobs[uuid] = Job(dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations)
self.tx_job_map[justice_txid] = [uuid]
if justice_txid in self.tx_job_map:
self.tx_job_map[justice_txid].append(uuid)
else:
self.tx_job_map[justice_txid] = [uuid]
if confirmations == 0:
self.unconfirmed_txs.append(justice_txid)
if confirmations == 0:
self.unconfirmed_txs.append(justice_txid)
logger.info("New job added.", dispute_txid=dispute_txid, justice_txid=justice_txid,
appointment_end=appointment_end)
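
Since the old and new bodies are interleaved above, this is roughly how the simplified create_job reads after the change, reconstructed from the added lines (indentation and exact line order are a best guess, not authoritative):

    def create_job(self, uuid, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations=0):
        self.jobs[uuid] = Job(dispute_txid, justice_txid, justice_rawtx, appointment_end)

        # ToDo: #23-define-behaviour-approaching-end
        if justice_txid in self.tx_job_map:
            self.tx_job_map[justice_txid].append(uuid)
        else:
            self.tx_job_map[justice_txid] = [uuid]

        if confirmations == 0:
            self.unconfirmed_txs.append(justice_txid)

        logger.info("New job added.", dispute_txid=dispute_txid, justice_txid=justice_txid,
                    appointment_end=appointment_end)
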
@@ -106,7 +96,7 @@ class Responder:
def do_watch(self):
# ToDo: #9-add-data-persistence
# change prev_block_hash to the last known tip when bootstrapping
prev_block_hash = 0
prev_block_hash = BlockProcessor.get_best_block_hash()
while len(self.jobs) > 0:
# We get notified for every newly received block
@@ -121,21 +111,22 @@ class Responder:
block_hash=block_hash, prev_block_hash=block.get('previousblockhash'), txs=txs)
# ToDo: #9-add-data-persistence
# change prev_block_hash condition
if prev_block_hash == block.get('previousblockhash') or prev_block_hash == 0:
if prev_block_hash == block.get('previousblockhash'):
self.unconfirmed_txs, self.missed_confirmations = BlockProcessor.check_confirmations(
txs, self.unconfirmed_txs, self.tx_job_map, self.missed_confirmations)
txs_to_rebroadcast = self.get_txs_to_rebroadcast(txs)
Cleaner.delete_completed_jobs(self.jobs, self.tx_job_map, self.get_completed_jobs(height), height)
completed_jobs = self.get_completed_jobs(height)
Cleaner.delete_completed_jobs(self.jobs, self.tx_job_map, completed_jobs, height)
self.rebroadcast(txs_to_rebroadcast)
# NOTCOVERED
else:
logger.warning("Reorg found",
local_prev_block_hash=prev_block_hash,
logger.warning("Reorg found", local_prev_block_hash=prev_block_hash,
remote_prev_block_hash=block.get('previousblockhash'))
# ToDo: #24-properly-handle-reorgs
self.handle_reorgs()
prev_block_hash = block.get('hash')
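
The reorg check itself is just a comparison between the locally stored tip and the new block's previousblockhash. A toy, self-contained illustration of that condition (the block dicts only mimic the two getblock fields do_watch relies on):

    def builds_on(prev_block_hash, block):
        # Same condition as above: the chain only extends cleanly if the new block
        # points back at the tip we processed last.
        return prev_block_hash == block.get("previousblockhash")

    tip_hash = "A"
    next_block = {"hash": "B", "previousblockhash": "A"}
    orphaning_block = {"hash": "C", "previousblockhash": "X"}

    print(builds_on(tip_hash, next_block))       # True: normal extension of the chain
    print(builds_on(tip_hash, orphaning_block))  # False: reorg detected, handle_reorgs is called
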
@@ -160,31 +151,42 @@ class Responder:
completed_jobs = []
for uuid, job in self.jobs.items():
if job.appointment_end <= height:
if job.appointment_end <= height and job.justice_txid not in self.unconfirmed_txs:
tx = Carrier.get_transaction(job.justice_txid)
# FIXME: Should be improved with the librarian
confirmations = tx.get('confirmations')
if tx is not None and confirmations > MIN_CONFIRMATIONS:
# The end of the appointment has been reached
completed_jobs.append((uuid, confirmations))
if tx is not None:
confirmations = tx.get('confirmations')
if confirmations >= MIN_CONFIRMATIONS:
# The end of the appointment has been reached
completed_jobs.append((uuid, confirmations))
return completed_jobs
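
The reordering above matters because the old code dereferenced tx before checking it for None. A minimal standalone illustration of the guarded check; MIN_CONFIRMATIONS is given a placeholder value here, the real constant is defined elsewhere in the codebase:

    MIN_CONFIRMATIONS = 6  # placeholder value

    def appointment_done(tx):
        # tx is the (possibly None) result of looking up the justice transaction.
        if tx is not None:
            confirmations = tx.get("confirmations")
            if confirmations is not None and confirmations >= MIN_CONFIRMATIONS:
                return True

        return False

    print(appointment_done(None))                    # False: transaction not found
    print(appointment_done({"confirmations": 2}))    # False: not deep enough yet
    print(appointment_done({"confirmations": 10}))   # True: appointment can be completed
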
def rebroadcast(self, jobs_to_rebroadcast):
def rebroadcast(self, txs_to_rebroadcast):
# DISCUSS: #22-discuss-confirmations-before-retry
# ToDo: #23-define-behaviour-approaching-end
for tx in jobs_to_rebroadcast:
for uuid in self.tx_job_map[tx]:
self.add_response(uuid, self.jobs[uuid].dispute_txid, self.jobs[uuid].justice_txid,
self.jobs[uuid].justice_rawtx, self.jobs[uuid].appointment_end, retry=True)
receipts = []
for txid in txs_to_rebroadcast:
self.missed_confirmations[txid] = 0
for uuid in self.tx_job_map[txid]:
job = self.jobs[uuid]
receipt = self.add_response(uuid, job.dispute_txid, job.justice_txid, job.justice_rawtx,
job.appointment_end, retry=True)
logger.warning("Transaction has missed many confirmations. Rebroadcasting.",
justice_txid=self.jobs[uuid].justice_txid,
confirmations_missed=CONFIRMATIONS_BEFORE_RETRY)
justice_txid=job.justice_txid, confirmations_missed=CONFIRMATIONS_BEFORE_RETRY)
receipts.append((txid, receipt))
return receipts
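
Because rebroadcast now returns the per-transaction receipts instead of discarding them, callers and tests can inspect the outcome. A small illustrative helper over that return value, again assuming Receipt exposes delivered and reason as attributes:

    def failed_rebroadcasts(receipts):
        # receipts is the list of (txid, receipt) pairs returned by rebroadcast above.
        return [(txid, receipt.reason) for txid, receipt in receipts if not receipt.delivered]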
# FIXME: Legacy code, must be checked and updated/fixed
# NOTCOVERED
def handle_reorgs(self):
for uuid, job in self.jobs.items():
# First we check if the dispute transaction is still in the blockchain. If not, the justice can not be

View File

@@ -7,12 +7,14 @@ from pisa.rpc_errors import RPC_INVALID_ADDRESS_OR_KEY
from pisa.utils.auth_proxy import AuthServiceProxy, JSONRPCException
# NOTCOVERED
def bitcoin_cli():
return AuthServiceProxy("http://%s:%s@%s:%d" % (conf.BTC_RPC_USER, conf.BTC_RPC_PASSWD, conf.BTC_RPC_HOST,
conf.BTC_RPC_PORT))
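
For reference, the proxy returned by bitcoin_cli exposes bitcoind's JSON-RPC methods as attributes, so any RPC can be called through it. A minimal usage sketch, assuming this module is importable as pisa.tools:

    from pisa.tools import bitcoin_cli
    from pisa.utils.auth_proxy import JSONRPCException

    try:
        # getblockcount is just an example; connection failures may raise other exceptions too.
        print(bitcoin_cli().getblockcount())
    except JSONRPCException as e:
        print("bitcoind returned an error:", e)
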
# TODO: currently only used in the Responder; might move there or to the BlockProcessor
# NOTCOVERED
def check_tx_in_chain(tx_id, logger=Logger(), tx_label='Transaction'):
tx_in_chain = False
confirmations = 0
@@ -39,6 +41,7 @@ def check_tx_in_chain(tx_id, logger=Logger(), tx_label='Transaction'):
return tx_in_chain, confirmations
# NOTCOVERED
def can_connect_to_bitcoind():
can_connect = True
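
The excerpt cuts off here; as a hedged sketch only, a plausible shape for the rest of can_connect_to_bitcoind is a cheap RPC used as a liveness probe (the actual implementation may differ):

    def can_connect_to_bitcoind():
        can_connect = True

        try:
            # Any inexpensive RPC works as a probe; help() is used here for illustration.
            bitcoin_cli().help()
        except Exception:
            can_connect = False

        return can_connect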