Mirror of https://github.com/aljazceru/python-teos.git (synced 2026-02-07 15:44:28 +01:00)

Commit: Reformats code to match code guidelines
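The hunks below are mechanical restyling: single-quoted strings become double-quoted, long calls and literals are exploded one element per line with trailing commas, and hand-aligned comments are collapsed to two spaces. This is consistent with the black formatter at a ~120-character line length; a sketch of the likely invocation (the tool and flags are an assumption, the commit does not name them):

    python -m black --line-length 120 pisa/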
@@ -3,25 +3,25 @@ import logging

 from pisa.utils.auth_proxy import AuthServiceProxy
 import pisa.conf as conf

-HOST = 'localhost'
+HOST = "localhost"
 PORT = 9814

 # Create the file logger
-f_logger = logging.getLogger('pisa_file_log')
+f_logger = logging.getLogger("pisa_file_log")
 f_logger.setLevel(logging.INFO)

 fh = logging.FileHandler(conf.SERVER_LOG_FILE)
 fh.setLevel(logging.INFO)
-fh_formatter = logging.Formatter('%(message)s')
+fh_formatter = logging.Formatter("%(message)s")
 fh.setFormatter(fh_formatter)
 f_logger.addHandler(fh)

 # Create the console logger
-c_logger = logging.getLogger('pisa_console_log')
+c_logger = logging.getLogger("pisa_console_log")
 c_logger.setLevel(logging.INFO)

 ch = logging.StreamHandler()
 ch.setLevel(logging.INFO)
-ch_formatter = logging.Formatter('%(asctime)s %(message)s', '%Y-%m-%d %H:%M:%S')
+ch_formatter = logging.Formatter("%(asctime)s %(message)s", "%Y-%m-%d %H:%M:%S")
 ch.setFormatter(ch_formatter)
 c_logger.addHandler(ch)
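The structured logger used throughout the hunks below (Logger("API"), Logger("Inspector"), ...) accepts keyword arguments alongside the message. Its implementation is not part of this diff; a minimal sketch of what such a wrapper could look like on top of the two loggers configured above (an assumption, not the project's actual code):

    import json
    import logging

    class Logger:
        def __init__(self, actor=None):
            # Reuse the handlers attached to the module-level loggers above
            self.actor = actor
            self.f_logger = logging.getLogger("pisa_file_log")
            self.c_logger = logging.getLogger("pisa_console_log")

        def info(self, msg, **kwargs):
            # Serialize structured fields into the single %(message)s slot
            line = json.dumps({"actor": self.actor, "message": msg, **kwargs})
            self.f_logger.info(line)
            self.c_logger.info(line)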
pisa/api.py
@@ -22,12 +22,12 @@ logger = Logger("API")
 watcher = None


-@app.route('/', methods=['POST'])
+@app.route("/", methods=["POST"])
 def add_appointment():
-    remote_addr = request.environ.get('REMOTE_ADDR')
-    remote_port = request.environ.get('REMOTE_PORT')
+    remote_addr = request.environ.get("REMOTE_ADDR")
+    remote_port = request.environ.get("REMOTE_PORT")

-    logger.info('Connection accepted', from_addr_port='{}:{}'.format(remote_addr, remote_port))
+    logger.info("Connection accepted", from_addr_port="{}:{}".format(remote_addr, remote_port))

     # Check content type once if properly defined
     request_data = json.loads(request.get_json())
@@ -42,7 +42,7 @@ def add_appointment():

     if appointment_added:
         rcode = HTTP_OK
-        response = {"locator": appointment.locator, "signature": hexlify(signature).decode('utf-8')}
+        response = {"locator": appointment.locator, "signature": hexlify(signature).decode("utf-8")}
     else:
         rcode = HTTP_SERVICE_UNAVAILABLE
         error = "appointment rejected"
@@ -56,8 +56,12 @@ def add_appointment():
         rcode = HTTP_BAD_REQUEST
         error = "appointment rejected. Request does not match the standard"

-    logger.info('Sending response and disconnecting',
-                from_addr_port='{}:{}'.format(remote_addr, remote_port), response=response, error=error)
+    logger.info(
+        "Sending response and disconnecting",
+        from_addr_port="{}:{}".format(remote_addr, remote_port),
+        response=response,
+        error=error,
+    )

     if error is None:
         return jsonify(response), rcode
@@ -67,9 +71,9 @@ def add_appointment():

 # FIXME: THE NEXT THREE API ENDPOINTS ARE FOR TESTING AND SHOULD BE REMOVED / PROPERLY MANAGED BEFORE PRODUCTION!
 # ToDo: #17-add-api-keys
-@app.route('/get_appointment', methods=['GET'])
+@app.route("/get_appointment", methods=["GET"])
 def get_appointment():
-    locator = request.args.get('locator')
+    locator = request.args.get("locator")
     response = []

     # ToDo: #15-add-system-monitor
@@ -79,7 +83,7 @@ def get_appointment():

     if appointment_in_watcher:
         for uuid in appointment_in_watcher:
             appointment_data = watcher.appointments[uuid].to_dict()
-            appointment_data['status'] = "being_watched"
+            appointment_data["status"] = "being_watched"
             response.append(appointment_data)

     if watcher.responder:
@@ -88,7 +92,7 @@ def get_appointment():

         for job in responder_jobs.values():
             if job.locator == locator:
                 job_data = job.to_dict()
-                job_data['status'] = "dispute_responded"
+                job_data["status"] = "dispute_responded"
                 response.append(job_data)

     if not response:
@@ -99,14 +103,14 @@ def get_appointment():

     return response


-@app.route('/get_all_appointments', methods=['GET'])
+@app.route("/get_all_appointments", methods=["GET"])
 def get_all_appointments():
     watcher_appointments = {}
     responder_jobs = {}

     # ToDo: #15-add-system-monitor

-    if request.remote_addr in request.host or request.remote_addr == '127.0.0.1':
+    if request.remote_addr in request.host or request.remote_addr == "127.0.0.1":
         for uuid, appointment in watcher.appointments.items():
             watcher_appointments[uuid] = appointment.to_dict()
@@ -122,7 +126,7 @@ def get_all_appointments():

     return response


-@app.route('/get_block_count', methods=['GET'])
+@app.route("/get_block_count", methods=["GET"])
 def get_block_count():
     return jsonify({"block_count": BlockProcessor.get_block_count()})
@@ -135,7 +139,7 @@ def start_api(w):
     watcher = w

     # Setting Flask log to ERROR only so it does not mess with our logging. Also disabling Flask's initial messages
-    logging.getLogger('werkzeug').setLevel(logging.ERROR)
-    os.environ['WERKZEUG_RUN_MAIN'] = 'true'
+    logging.getLogger("werkzeug").setLevel(logging.ERROR)
+    os.environ["WERKZEUG_RUN_MAIN"] = "true"

     app.run(host=HOST, port=PORT)
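For orientation, a hedged client-side sketch of the add_appointment endpoint above. Field values are placeholders; the body is double-encoded (json.dumps on the client, json.loads(request.get_json()) on the server), and the cipher/hash names must match conf.SUPPORTED_CIPHERS and conf.SUPPORTED_HASH_FUNCTIONS, which are not shown in this diff:

    import json
    import requests

    appointment = {
        "locator": "00" * 32,         # 64 hex characters, per the Inspector checks below
        "start_time": 700000,         # block heights, per the Inspector checks below
        "end_time": 700100,
        "dispute_delta": 20,
        "encrypted_blob": "ff" * 64,  # hex-encoded ciphertext
        "cipher": "AES-GCM-128",      # placeholder; must be in conf.SUPPORTED_CIPHERS
        "hash_function": "SHA256",    # placeholder; must be in conf.SUPPORTED_HASH_FUNCTIONS
    }

    # json= encodes the already-serialized string once more, matching the server's double decode
    r = requests.post("http://localhost:9814/", json=json.dumps(appointment))
    print(r.status_code, r.json())  # on success: {"locator": ..., "signature": ...}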
@@ -6,11 +6,12 @@ from pisa.encrypted_blob import EncryptedBlob

 # Basic appointment structure
 class Appointment:
     # DISCUSS: 35-appointment-checks
-    def __init__(self, locator, start_time, end_time, dispute_delta, encrypted_blob, cipher, hash_function,
-                 triggered=False):
+    def __init__(
+        self, locator, start_time, end_time, dispute_delta, encrypted_blob, cipher, hash_function, triggered=False
+    ):
         self.locator = locator
-        self.start_time = start_time    # ToDo: #4-standardize-appointment-fields
-        self.end_time = end_time        # ToDo: #4-standardize-appointment-fields
+        self.start_time = start_time  # ToDo: #4-standardize-appointment-fields
+        self.end_time = end_time  # ToDo: #4-standardize-appointment-fields
         self.dispute_delta = dispute_delta
         self.encrypted_blob = EncryptedBlob(encrypted_blob)
         self.cipher = cipher
@@ -20,36 +21,55 @@ class Appointment:
     @classmethod
     def from_dict(cls, appointment_data):
         locator = appointment_data.get("locator")
-        start_time = appointment_data.get("start_time")    # ToDo: #4-standardize-appointment-fields
-        end_time = appointment_data.get("end_time")        # ToDo: #4-standardize-appointment-fields
+        start_time = appointment_data.get("start_time")  # ToDo: #4-standardize-appointment-fields
+        end_time = appointment_data.get("end_time")  # ToDo: #4-standardize-appointment-fields
         dispute_delta = appointment_data.get("dispute_delta")
         encrypted_blob_data = appointment_data.get("encrypted_blob")
         cipher = appointment_data.get("cipher")
         hash_function = appointment_data.get("hash_function")
         triggered = appointment_data.get("triggered")

-        if any(v is None for v in [locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher,
-                                   hash_function, triggered]):
+        if any(
+            v is None
+            for v in [
+                locator,
+                start_time,
+                end_time,
+                dispute_delta,
+                encrypted_blob_data,
+                cipher,
+                hash_function,
+                triggered,
+            ]
+        ):
             raise ValueError("Wrong appointment data, some fields are missing")

         else:
-            appointment = cls(locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function,
-                              triggered)
+            appointment = cls(
+                locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function, triggered
+            )

         return appointment

     def to_dict(self):
         # ToDO: #3-improve-appointment-structure
-        appointment = {"locator": self.locator, "start_time": self.start_time, "end_time": self.end_time,
-                       "dispute_delta": self.dispute_delta, "encrypted_blob": self.encrypted_blob.data,
-                       "cipher": self.cipher, "hash_function": self.hash_function, "triggered": self.triggered}
+        appointment = {
+            "locator": self.locator,
+            "start_time": self.start_time,
+            "end_time": self.end_time,
+            "dispute_delta": self.dispute_delta,
+            "encrypted_blob": self.encrypted_blob.data,
+            "cipher": self.cipher,
+            "hash_function": self.hash_function,
+            "triggered": self.triggered,
+        }

         return appointment

     def to_json(self):
-        return json.dumps(self.to_dict(), sort_keys=True, separators=(',', ':'))
+        return json.dumps(self.to_dict(), sort_keys=True, separators=(",", ":"))

     def serialize(self):
         data = self.to_dict()
         data.pop("triggered")
-        return json.dumps(data, sort_keys=True, separators=(',', ':')).encode("utf-8")
+        return json.dumps(data, sort_keys=True, separators=(",", ":")).encode("utf-8")
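A note on why to_json and serialize above pin sort_keys and compact separators: the encoding must be deterministic so the tower's signature over an appointment can be re-derived from an independently re-encoded copy. A usage sketch (constructor values are placeholders):

    import json
    from pisa.appointment import Appointment

    a = Appointment("00" * 32, 700000, 700100, 20, "ff" * 32, "AES-GCM-128", "SHA256")
    b = Appointment.from_dict(json.loads(a.to_json()))
    assert a.serialize() == b.serialize()  # identical bytes regardless of dict ordering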
@@ -101,7 +101,7 @@ class BlockProcessor:
             try:
                 # ToDo: #20-test-tx-decrypting-edge-cases
                 justice_rawtx = appointments[uuid].encrypted_blob.decrypt(dispute_txid)
-                justice_txid = bitcoin_cli().decoderawtransaction(justice_rawtx).get('txid')
+                justice_txid = bitcoin_cli().decoderawtransaction(justice_rawtx).get("txid")
                 logger.info("Match found for locator.", locator=locator, uuid=uuid, justice_txid=justice_txid)

             except JSONRPCException as e:
@@ -5,7 +5,6 @@ from pisa.appointment import Appointment


 class Builder:
-
     @staticmethod
     def build_appointments(appointments_data):
         appointments = {}
@@ -48,4 +47,3 @@ class Builder:
             block_queue.put(block)

         return block_queue
-
@@ -26,7 +26,7 @@ class Carrier:
             receipt = Receipt(delivered=True)

         except JSONRPCException as e:
-            errno = e.error.get('code')
+            errno = e.error.get("code")
             # Since we're pushing a raw transaction to the network we can face several rejections
             if errno == RPC_VERIFY_REJECTED:
                 # DISCUSS: 37-transaction-rejection
@@ -64,7 +64,7 @@ class Carrier:

             else:
                 # If something else happens (unlikely but possible) log it so we can treat it in future releases
-                logger.error("JSONRPCException.", method='Carrier.send_transaction', error=e.error)
+                logger.error("JSONRPCException.", method="Carrier.send_transaction", error=e.error)
                 receipt = Receipt(delivered=False, reason=UNKNOWN_JSON_RPC_EXCEPTION)

         return receipt
@@ -79,11 +79,11 @@ class Carrier:
             # While it's quite unlikely, the transaction that was already in the blockchain could have been
             # reorged while we were querying bitcoind to get the confirmation count. In such a case we just
             # restart the job
-            if e.error.get('code') == RPC_INVALID_ADDRESS_OR_KEY:
+            if e.error.get("code") == RPC_INVALID_ADDRESS_OR_KEY:
                 logger.info("Transaction got reorged before obtaining information", txid=txid)

             else:
                 # If something else happens (unlikely but possible) log it so we can treat it in future releases
-                logger.error("JSONRPCException.", method='Carrier.get_transaction', error=e.error)
+                logger.error("JSONRPCException.", method="Carrier.get_transaction", error=e.error)

         return tx_info
@@ -28,8 +28,12 @@ class Cleaner:
     @staticmethod
     def delete_completed_jobs(jobs, tx_job_map, completed_jobs, height, db_manager):
         for uuid, confirmations in completed_jobs:
-            logger.info("Job completed. Appointment ended after reaching enough confirmations.",
-                        uuid=uuid, height=height, confirmations=confirmations)
+            logger.info(
+                "Job completed. Appointment ended after reaching enough confirmations.",
+                uuid=uuid,
+                height=height,
+                confirmations=confirmations,
+            )

             # ToDo: #9-add-data-persistence
             justice_txid = jobs[uuid].justice_txid
@@ -9,7 +9,7 @@ WATCHER_PREFIX = "w"
 WATCHER_LAST_BLOCK_KEY = "bw"
 RESPONDER_PREFIX = "r"
 RESPONDER_LAST_BLOCK_KEY = "br"
-LOCATOR_MAP_PREFIX = 'm'
+LOCATOR_MAP_PREFIX = "m"


 class DBManager:
@@ -21,25 +21,25 @@ class DBManager:
             self.db = plyvel.DB(db_path)

         except plyvel.Error as e:
-            if 'create_if_missing is false' in str(e):
+            if "create_if_missing is false" in str(e):
                 logger.info("No db found. Creating a fresh one")
                 self.db = plyvel.DB(db_path, create_if_missing=True)

     def load_appointments_db(self, prefix):
         data = {}

-        for k, v in self.db.iterator(prefix=prefix.encode('utf-8')):
+        for k, v in self.db.iterator(prefix=prefix.encode("utf-8")):
             # Get uuid and appointment_data from the db
-            uuid = k[len(prefix):].decode('utf-8')
+            uuid = k[len(prefix) :].decode("utf-8")
             data[uuid] = json.loads(v)

         return data

     def get_last_known_block(self, key):
-        last_block = self.db.get(key.encode('utf-8'))
+        last_block = self.db.get(key.encode("utf-8"))

         if last_block:
-            last_block = last_block.decode('utf-8')
+            last_block = last_block.decode("utf-8")

         return last_block
@@ -47,23 +47,24 @@ class DBManager:
         if isinstance(prefix, str):
             key = prefix + key

-        key = key.encode('utf-8')
-        value = value.encode('utf-8')
+        key = key.encode("utf-8")
+        value = value.encode("utf-8")

         self.db.put(key, value)

     def delete_entry(self, key, prefix=None):
         if isinstance(prefix, str):
             key = prefix + key

-        key = key.encode('utf-8')
+        key = key.encode("utf-8")

         self.db.delete(key)

     def load_watcher_appointments(self):
         all_appointments = self.load_appointments_db(prefix=WATCHER_PREFIX)
-        non_triggered_appointments = {uuid: appointment for uuid, appointment in all_appointments.items()
-                                      if appointment["triggered"] is False}
+        non_triggered_appointments = {
+            uuid: appointment for uuid, appointment in all_appointments.items() if appointment["triggered"] is False
+        }

         return non_triggered_appointments
@@ -79,11 +80,11 @@ class DBManager:
         logger.info("Adding appointment to Responder's db", uuid=uuid)

     def load_locator_map(self, locator):
-        key = (LOCATOR_MAP_PREFIX+locator).encode('utf-8')
+        key = (LOCATOR_MAP_PREFIX + locator).encode("utf-8")
         locator_map = self.db.get(key)

         if locator_map is not None:
-            locator_map = json.loads(locator_map.decode('utf-8'))
+            locator_map = json.loads(locator_map.decode("utf-8"))

         else:
             logger.info("Locator not found in the db", locator=locator)
@@ -105,8 +106,8 @@ class DBManager:
             locator_map = [uuid]
             logger.info("Creating new locator map", locator=locator, uuid=uuid)

-        key = (LOCATOR_MAP_PREFIX + locator).encode('utf-8')
-        self.db.put(key, json.dumps(locator_map).encode('utf-8'))
+        key = (LOCATOR_MAP_PREFIX + locator).encode("utf-8")
+        self.db.put(key, json.dumps(locator_map).encode("utf-8"))

     def delete_locator_map(self, locator):
         self.delete_entry(locator, prefix=LOCATOR_MAP_PREFIX)
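DBManager above multiplexes watcher appointments ("w"), responder jobs ("r"), locator maps ("m"), and last-block markers ("bw"/"br") into a single LevelDB by key prefix. A self-contained sketch of that layout with plyvel (the path and payload are placeholders):

    import json
    import plyvel

    db = plyvel.DB("/tmp/pisa_db_example", create_if_missing=True)  # hypothetical path

    # Store a watcher appointment under the "w" namespace
    db.put(b"w" + b"some-uuid", json.dumps({"triggered": False}).encode("utf-8"))

    # Iterate only the watcher namespace and strip the prefix back off
    for k, v in db.iterator(prefix=b"w"):
        uuid = k[len(b"w"):].decode("utf-8")
        print(uuid, json.loads(v))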
@@ -26,11 +26,13 @@ class EncryptedBlob:
         sk = master_key[:16]
         nonce = master_key[16:]

-        logger.info("Creating new blob.",
-                    master_key=hexlify(master_key).decode(),
-                    sk=hexlify(sk).decode(),
-                    nonce=hexlify(sk).decode(),
-                    encrypted_blob=self.data)
+        logger.info(
+            "Creating new blob.",
+            master_key=hexlify(master_key).decode(),
+            sk=hexlify(sk).decode(),
+            nonce=hexlify(sk).decode(),
+            encrypted_blob=self.data,
+        )

         # Decrypt
         aesgcm = AESGCM(sk)
@@ -38,7 +40,7 @@ class EncryptedBlob:
         try:
             raw_tx = aesgcm.decrypt(nonce=nonce, data=data, associated_data=None)
-            hex_raw_tx = hexlify(raw_tx).decode('utf8')
+            hex_raw_tx = hexlify(raw_tx).decode("utf8")

         except InvalidTag:
             hex_raw_tx = None
@@ -14,5 +14,3 @@ RPC_TX_REORGED_AFTER_BROADCAST = -98

 # UNHANDLED
 UNKNOWN_JSON_RPC_EXCEPTION = -99
-
-
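A hedged sketch of the encryption side matching EncryptedBlob.decrypt above: the blob is AES-128-GCM ciphertext whose key and nonce are both carved out of a 32-byte master key (first 16 bytes become sk, the remainder the nonce). How PISA derives master_key from the dispute txid is not shown in this diff, so the value below is a placeholder:

    from binascii import hexlify
    from cryptography.hazmat.primitives.ciphers.aead import AESGCM

    master_key = bytes(32)  # placeholder; the real derivation is not in this diff
    sk, nonce = master_key[:16], master_key[16:]

    ciphertext = AESGCM(sk).encrypt(nonce=nonce, data=b"raw transaction bytes", associated_data=None)
    encrypted_blob = hexlify(ciphertext).decode("utf8")  # hex string, as the API expects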
@@ -16,13 +16,13 @@ logger = Logger("Inspector")

 class Inspector:
     def inspect(self, data):
-        locator = data.get('locator')
-        start_time = data.get('start_time')
-        end_time = data.get('end_time')
-        dispute_delta = data.get('dispute_delta')
-        encrypted_blob = data.get('encrypted_blob')
-        cipher = data.get('cipher')
-        hash_function = data.get('hash_function')
+        locator = data.get("locator")
+        start_time = data.get("start_time")
+        end_time = data.get("end_time")
+        dispute_delta = data.get("dispute_delta")
+        encrypted_blob = data.get("encrypted_blob")
+        cipher = data.get("cipher")
+        hash_function = data.get("hash_function")

         block_height = BlockProcessor.get_block_count()
@@ -61,14 +61,17 @@ class Inspector:
         if locator is None:
             rcode = errors.APPOINTMENT_EMPTY_FIELD
             message = "empty locator received"
+
         elif type(locator) != str:
             rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
             message = "wrong locator data type ({})".format(type(locator))
+
         elif len(locator) != 64:
             rcode = errors.APPOINTMENT_WRONG_FIELD_SIZE
             message = "wrong locator size ({})".format(len(locator))
         # TODO: #12-check-txid-regexp
-        elif re.search(r'^[0-9A-Fa-f]+$', locator) is None:
+
+        elif re.search(r"^[0-9A-Fa-f]+$", locator) is None:
             rcode = errors.APPOINTMENT_WRONG_FIELD_FORMAT
             message = "wrong locator format ({})".format(locator)
@@ -90,9 +93,11 @@ class Inspector:
         if start_time is None:
             rcode = errors.APPOINTMENT_EMPTY_FIELD
             message = "empty start_time received"
+
         elif t != int:
             rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
             message = "wrong start_time data type ({})".format(t)
+
         elif start_time <= block_height:
             rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
             if start_time < block_height:
@@ -118,21 +123,24 @@ class Inspector:
         if end_time is None:
             rcode = errors.APPOINTMENT_EMPTY_FIELD
             message = "empty end_time received"
+
         elif t != int:
             rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
             message = "wrong end_time data type ({})".format(t)
+
         elif start_time >= end_time:
             rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
             if start_time > end_time:
                 message = "end_time is smaller than start_time"
             else:
                 message = "end_time is equal to start_time"
+
         elif block_height >= end_time:
             rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
             if block_height > end_time:
-                message = 'end_time is in the past'
+                message = "end_time is in the past"
             else:
-                message = 'end_time is too close to current height'
+                message = "end_time is too close to current height"

         if message is not None:
             logger.error(message)
@@ -149,13 +157,16 @@ class Inspector:
         if dispute_delta is None:
             rcode = errors.APPOINTMENT_EMPTY_FIELD
             message = "empty dispute_delta received"
+
         elif t != int:
             rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
             message = "wrong dispute_delta data type ({})".format(t)
+
         elif dispute_delta < conf.MIN_DISPUTE_DELTA:
             rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
             message = "dispute delta too small. The dispute delta should be at least {} (current: {})".format(
-                conf.MIN_DISPUTE_DELTA, dispute_delta)
+                conf.MIN_DISPUTE_DELTA, dispute_delta
+            )

         if message is not None:
             logger.error(message)
@@ -173,10 +184,12 @@ class Inspector:
         if encrypted_blob is None:
             rcode = errors.APPOINTMENT_EMPTY_FIELD
             message = "empty encrypted_blob received"
+
         elif t != str:
             rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
             message = "wrong encrypted_blob data type ({})".format(t)
-        elif re.search(r'^[0-9A-Fa-f]+$', encrypted_blob) is None:
+
+        elif re.search(r"^[0-9A-Fa-f]+$", encrypted_blob) is None:
             rcode = errors.APPOINTMENT_WRONG_FIELD_FORMAT
             message = "wrong encrypted_blob format ({})".format(encrypted_blob)
@@ -195,9 +208,11 @@ class Inspector:
         if cipher is None:
             rcode = errors.APPOINTMENT_EMPTY_FIELD
             message = "empty cipher received"
+
         elif t != str:
             rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
             message = "wrong cipher data type ({})".format(t)
+
         elif cipher.upper() not in conf.SUPPORTED_CIPHERS:
             rcode = errors.APPOINTMENT_CIPHER_NOT_SUPPORTED
             message = "cipher not supported: {}".format(cipher)
@@ -217,9 +232,11 @@ class Inspector:
         if hash_function is None:
             rcode = errors.APPOINTMENT_EMPTY_FIELD
             message = "empty hash_function received"
+
         elif t != str:
             rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
             message = "wrong hash_function data type ({})".format(t)
+
         elif hash_function.upper() not in conf.SUPPORTED_HASH_FUNCTIONS:
             rcode = errors.APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED
             message = "hash_function not supported {}".format(hash_function)
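The locator checks above reduce to: present, a str, exactly 64 characters, all hex. Condensed for illustration (the helper below is hypothetical, not part of the Inspector):

    import re

    def valid_locator(locator):
        # Mirrors the Inspector's chain: type, size, then hex format
        return isinstance(locator, str) and len(locator) == 64 and re.search(r"^[0-9A-Fa-f]+$", locator) is not None

    assert valid_locator("aB" * 32)
    assert not valid_locator("xy" * 32)  # right length, not hex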
@@ -24,14 +24,14 @@ def handle_signals(signal_received, frame):
     exit(0)


-if __name__ == '__main__':
+if __name__ == "__main__":
     logger.info("Starting PISA")

     signal(SIGINT, handle_signals)
     signal(SIGTERM, handle_signals)
     signal(SIGQUIT, handle_signals)

-    opts, _ = getopt(argv[1:], '', [''])
+    opts, _ = getopt(argv[1:], "", [""])
     for opt, arg in opts:
         # FIXME: Leaving this here for future option/arguments
         pass
@@ -62,8 +62,11 @@ if __name__ == '__main__':
         last_block_responder = db_manager.load_last_block_hash_responder()

         missed_blocks_watcher = block_processor.get_missed_blocks(last_block_watcher)
-        missed_blocks_responder = missed_blocks_watcher if last_block_watcher == last_block_responder \
-            else block_processor.get_missed_blocks(last_block_watcher)
+        missed_blocks_responder = (
+            missed_blocks_watcher
+            if last_block_watcher == last_block_responder
+            else block_processor.get_missed_blocks(last_block_watcher)
+        )

         responder = Responder(db_manager)
         responder.jobs, responder.tx_job_map = Builder.build_jobs(responder_jobs_data)
@@ -79,4 +82,3 @@ if __name__ == '__main__':
     except Exception as e:
         logger.error("An error occurred: {}. Shutting down".format(e))
         exit(1)
-
@@ -44,8 +44,13 @@ class Job:
         return job

     def to_dict(self):
-        job = {"locator": self.locator, "dispute_txid": self.dispute_txid, "justice_txid": self.justice_txid,
-               "justice_rawtx": self.justice_rawtx, "appointment_end": self.appointment_end}
+        job = {
+            "locator": self.locator,
+            "dispute_txid": self.dispute_txid,
+            "justice_txid": self.justice_txid,
+            "justice_rawtx": self.justice_rawtx,
+            "appointment_end": self.appointment_end,
+        }

         return job
@@ -113,8 +118,9 @@ class Responder:

         self.db_manager.store_responder_job(uuid, job.to_json())

-        logger.info("New job added.", dispute_txid=dispute_txid, justice_txid=justice_txid,
-                    appointment_end=appointment_end)
+        logger.info(
+            "New job added.", dispute_txid=dispute_txid, justice_txid=justice_txid, appointment_end=appointment_end
+        )

         if self.asleep:
             self.asleep = False
@@ -124,7 +130,7 @@ class Responder:
             responder.start()

     def do_subscribe(self):
-        self.zmq_subscriber = ZMQHandler(parent='Responder')
+        self.zmq_subscriber = ZMQHandler(parent="Responder")
         self.zmq_subscriber.handle(self.block_queue)

     def do_watch(self):
@@ -138,16 +144,18 @@ class Responder:
             block = BlockProcessor.get_block(block_hash)

             if block is not None:
-                txs = block.get('tx')
-                height = block.get('height')
+                txs = block.get("tx")
+                height = block.get("height")

-                logger.info("New block received",
-                            block_hash=block_hash, prev_block_hash=block.get('previousblockhash'), txs=txs)
+                logger.info(
+                    "New block received", block_hash=block_hash, prev_block_hash=block.get("previousblockhash"), txs=txs
+                )

                 # ToDo: #9-add-data-persistence
-                if prev_block_hash == block.get('previousblockhash'):
+                if prev_block_hash == block.get("previousblockhash"):
                     self.unconfirmed_txs, self.missed_confirmations = BlockProcessor.check_confirmations(
-                        txs, self.unconfirmed_txs, self.tx_job_map, self.missed_confirmations)
+                        txs, self.unconfirmed_txs, self.tx_job_map, self.missed_confirmations
+                    )

                     txs_to_rebroadcast = self.get_txs_to_rebroadcast(txs)
                     completed_jobs = self.get_completed_jobs(height)
@@ -157,8 +165,11 @@ class Responder:

                 # NOTCOVERED
                 else:
-                    logger.warning("Reorg found", local_prev_block_hash=prev_block_hash,
-                                   remote_prev_block_hash=block.get('previousblockhash'))
+                    logger.warning(
+                        "Reorg found",
+                        local_prev_block_hash=prev_block_hash,
+                        remote_prev_block_hash=block.get("previousblockhash"),
+                    )

                     # ToDo: #24-properly-handle-reorgs
                     self.handle_reorgs()
@@ -166,7 +177,7 @@ class Responder:
                 # Register the last processed block for the responder
                 self.db_manager.store_last_block_hash_responder(block_hash)

-                prev_block_hash = block.get('hash')
+                prev_block_hash = block.get("hash")

         # Go back to sleep if there are no more jobs
         self.asleep = True
@@ -194,7 +205,7 @@ class Responder:

             # FIXME: Should be improved with the librarian
             if tx is not None:
-                confirmations = tx.get('confirmations')
+                confirmations = tx.get("confirmations")

                 if confirmations >= MIN_CONFIRMATIONS:
                     # The end of the appointment has been reached
@@ -213,11 +224,21 @@ class Responder:

             for uuid in self.tx_job_map[txid]:
                 job = self.jobs[uuid]
-                receipt = self.add_response(uuid, job.dispute_txid, job.justice_txid, job.justice_rawtx,
-                                            job.appointment_end, block_hash, retry=True)
+                receipt = self.add_response(
+                    uuid,
+                    job.dispute_txid,
+                    job.justice_txid,
+                    job.justice_rawtx,
+                    job.appointment_end,
+                    block_hash,
+                    retry=True,
+                )

-                logger.warning("Transaction has missed many confirmations. Rebroadcasting.",
-                               justice_txid=job.justice_txid, confirmations_missed=CONFIRMATIONS_BEFORE_RETRY)
+                logger.warning(
+                    "Transaction has missed many confirmations. Rebroadcasting.",
+                    justice_txid=job.justice_txid,
+                    confirmations_missed=CONFIRMATIONS_BEFORE_RETRY,
+                )

                 receipts.append((txid, receipt))
@@ -229,19 +250,22 @@ class Responder:
         for uuid, job in self.jobs.items():
             # First we check if the dispute transaction is still in the blockchain. If not, the justice can not be
             # there either, so we'll need to call the reorg manager straight away
-            dispute_in_chain, _ = check_tx_in_chain(job.dispute_txid, logger=logger, tx_label='Dispute tx')
+            dispute_in_chain, _ = check_tx_in_chain(job.dispute_txid, logger=logger, tx_label="Dispute tx")

             # If the dispute is there, we can check the justice tx
             if dispute_in_chain:
-                justice_in_chain, justice_confirmations = check_tx_in_chain(job.justice_txid, logger=logger,
-                                                                            tx_label='Justice tx')
+                justice_in_chain, justice_confirmations = check_tx_in_chain(
+                    job.justice_txid, logger=logger, tx_label="Justice tx"
+                )

                 # If both transactions are there, we only need to update the justice tx confirmation count
                 if justice_in_chain:
-                    logger.info("Updating confirmation count for transaction.",
-                                justice_txid=job.justice_txid,
-                                prev_count=job.confirmations,
-                                curr_count=justice_confirmations)
+                    logger.info(
+                        "Updating confirmation count for transaction.",
+                        justice_txid=job.justice_txid,
+                        prev_count=job.confirmations,
+                        curr_count=justice_confirmations,
+                    )

                     job.confirmations = justice_confirmations
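A hedged round-trip sketch for the Job structure above, assuming Job lives in pisa.responder and that Job.from_dict mirrors Appointment.from_dict (the classmethod itself is cut off in this diff); values are placeholders:

    from pisa.responder import Job  # module path assumed from the class Responder hunks above

    job_data = {
        "locator": "00" * 32,
        "dispute_txid": "aa" * 32,
        "justice_txid": "bb" * 32,
        "justice_rawtx": "cc" * 50,
        "appointment_end": 700100,
    }
    # to_dict feeds both the on-disk JSON (via to_json) and the
    # /get_appointment "dispute_responded" view
    assert Job.from_dict(job_data).to_dict() == job_data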
@@ -1,42 +1,44 @@
 # Ported from https://github.com/bitcoin/bitcoin/blob/0.18/src/rpc/protocol.h

 # General application defined errors
-RPC_MISC_ERROR              = -1   # std::exception thrown in command handling
-RPC_TYPE_ERROR              = -3   # Unexpected type was passed as parameter
-RPC_INVALID_ADDRESS_OR_KEY  = -5   # Invalid address or key
-RPC_OUT_OF_MEMORY           = -7   # Ran out of memory during operation
-RPC_INVALID_PARAMETER       = -8   # Invalid missing or duplicate parameter
-RPC_DATABASE_ERROR          = -20  # Database error
-RPC_DESERIALIZATION_ERROR   = -22  # Error parsing or validating structure in raw format
-RPC_VERIFY_ERROR            = -25  # General error during transaction or block submission
-RPC_VERIFY_REJECTED         = -26  # Transaction or block was rejected by network rules
-RPC_VERIFY_ALREADY_IN_CHAIN = -27  # Transaction already in chain
-RPC_IN_WARMUP               = -28  # Client still warming up
-RPC_METHOD_DEPRECATED       = -32  # RPC method is deprecated
+RPC_MISC_ERROR = -1  # std::exception thrown in command handling
+RPC_TYPE_ERROR = -3  # Unexpected type was passed as parameter
+RPC_INVALID_ADDRESS_OR_KEY = -5  # Invalid address or key
+RPC_OUT_OF_MEMORY = -7  # Ran out of memory during operation
+RPC_INVALID_PARAMETER = -8  # Invalid missing or duplicate parameter
+RPC_DATABASE_ERROR = -20  # Database error
+RPC_DESERIALIZATION_ERROR = -22  # Error parsing or validating structure in raw format
+RPC_VERIFY_ERROR = -25  # General error during transaction or block submission
+RPC_VERIFY_REJECTED = -26  # Transaction or block was rejected by network rules
+RPC_VERIFY_ALREADY_IN_CHAIN = -27  # Transaction already in chain
+RPC_IN_WARMUP = -28  # Client still warming up
+RPC_METHOD_DEPRECATED = -32  # RPC method is deprecated

 # Aliases for backward compatibility
-RPC_TRANSACTION_ERROR           = RPC_VERIFY_ERROR
-RPC_TRANSACTION_REJECTED        = RPC_VERIFY_REJECTED
-RPC_TRANSACTION_ALREADY_IN_CHAIN= RPC_VERIFY_ALREADY_IN_CHAIN
+RPC_TRANSACTION_ERROR = RPC_VERIFY_ERROR
+RPC_TRANSACTION_REJECTED = RPC_VERIFY_REJECTED
+RPC_TRANSACTION_ALREADY_IN_CHAIN = RPC_VERIFY_ALREADY_IN_CHAIN

 # P2P client errors
-RPC_CLIENT_NOT_CONNECTED       = -9   # Bitcoin is not connected
-RPC_CLIENT_IN_INITIAL_DOWNLOAD = -10  # Still downloading initial blocks
-RPC_CLIENT_NODE_ALREADY_ADDED  = -23  # Node is already added
-RPC_CLIENT_NODE_NOT_ADDED      = -24  # Node has not been added before
-RPC_CLIENT_NODE_NOT_CONNECTED  = -29  # Node to disconnect not found in connected nodes
+RPC_CLIENT_NOT_CONNECTED = -9  # Bitcoin is not connected
+RPC_CLIENT_IN_INITIAL_DOWNLOAD = -10  # Still downloading initial blocks
+RPC_CLIENT_NODE_ALREADY_ADDED = -23  # Node is already added
+RPC_CLIENT_NODE_NOT_ADDED = -24  # Node has not been added before
+RPC_CLIENT_NODE_NOT_CONNECTED = -29  # Node to disconnect not found in connected nodes
 RPC_CLIENT_INVALID_IP_OR_SUBNET = -30  # Invalid IP/Subnet
-RPC_CLIENT_P2P_DISABLED        = -31  # No valid connection manager instance found
+RPC_CLIENT_P2P_DISABLED = -31  # No valid connection manager instance found

 # Wallet errors
-RPC_WALLET_ERROR              = -4   # Unspecified problem with wallet (key not found etc.)
-RPC_WALLET_INSUFFICIENT_FUNDS = -6   # Not enough funds in wallet or account
-RPC_WALLET_INVALID_LABEL_NAME = -11  # Invalid label name
-RPC_WALLET_KEYPOOL_RAN_OUT    = -12  # Keypool ran out call keypoolrefill first
-RPC_WALLET_UNLOCK_NEEDED      = -13  # Enter the wallet passphrase with walletpassphrase first
+RPC_WALLET_ERROR = -4  # Unspecified problem with wallet (key not found etc.)
+RPC_WALLET_INSUFFICIENT_FUNDS = -6  # Not enough funds in wallet or account
+RPC_WALLET_INVALID_LABEL_NAME = -11  # Invalid label name
+RPC_WALLET_KEYPOOL_RAN_OUT = -12  # Keypool ran out call keypoolrefill first
+RPC_WALLET_UNLOCK_NEEDED = -13  # Enter the wallet passphrase with walletpassphrase first
 RPC_WALLET_PASSPHRASE_INCORRECT = -14  # The wallet passphrase entered was incorrect
-RPC_WALLET_WRONG_ENC_STATE    = -15  # Command given in wrong wallet encryption state (encrypting an encrypted wallet etc.)
-RPC_WALLET_ENCRYPTION_FAILED  = -16  # Failed to encrypt the wallet
-RPC_WALLET_ALREADY_UNLOCKED   = -17  # Wallet is already unlocked
-RPC_WALLET_NOT_FOUND          = -18  # Invalid wallet specified
-RPC_WALLET_NOT_SPECIFIED      = -19  # No wallet specified (error when there are multiple wallets loaded)
+RPC_WALLET_WRONG_ENC_STATE = (
+    -15
+)  # Command given in wrong wallet encryption state (encrypting an encrypted wallet etc.)
+RPC_WALLET_ENCRYPTION_FAILED = -16  # Failed to encrypt the wallet
+RPC_WALLET_ALREADY_UNLOCKED = -17  # Wallet is already unlocked
+RPC_WALLET_NOT_FOUND = -18  # Invalid wallet specified
+RPC_WALLET_NOT_SPECIFIED = -19  # No wallet specified (error when there are multiple wallets loaded)
@@ -14,11 +14,11 @@ FEED_PORT = None
 MAX_APPOINTMENTS = 100
 EXPIRY_DELTA = 6
 MIN_DISPUTE_DELTA = 20
-SERVER_LOG_FILE = 'pisa.log'
-PISA_SECRET_KEY = 'pisa_sk.pem'
+SERVER_LOG_FILE = "pisa.log"
+PISA_SECRET_KEY = "pisa_sk.pem"

 # PISA-CLI
-CLIENT_LOG_FILE = 'pisa.log'
+CLIENT_LOG_FILE = "pisa.log"

 # TEST
 TEST_LOG_FILE = "test.log"
@@ -9,13 +9,14 @@ from pisa.utils.auth_proxy import AuthServiceProxy, JSONRPCException

 # NOTCOVERED
 def bitcoin_cli():
-    return AuthServiceProxy("http://%s:%s@%s:%d" % (conf.BTC_RPC_USER, conf.BTC_RPC_PASSWD, conf.BTC_RPC_HOST,
-                                                    conf.BTC_RPC_PORT))
+    return AuthServiceProxy(
+        "http://%s:%s@%s:%d" % (conf.BTC_RPC_USER, conf.BTC_RPC_PASSWD, conf.BTC_RPC_HOST, conf.BTC_RPC_PORT)
+    )


 # TODO: currently only used in the Responder; might move there or in the BlockProcessor
 # NOTCOVERED
-def check_tx_in_chain(tx_id, logger=Logger(), tx_label='Transaction'):
+def check_tx_in_chain(tx_id, logger=Logger(), tx_label="Transaction"):
     tx_in_chain = False
     confirmations = 0
@@ -31,12 +32,12 @@ def check_tx_in_chain(tx_id, logger=Logger(), tx_label='Transaction'):
            logger.error("{} found in mempool".format(tx_label), txid=tx_id)

     except JSONRPCException as e:
-        if e.error.get('code') == RPC_INVALID_ADDRESS_OR_KEY:
+        if e.error.get("code") == RPC_INVALID_ADDRESS_OR_KEY:
             logger.error("{} not found in mempool nor blockchain".format(tx_label), txid=tx_id)

         else:
             # ToDO: Unhandled errors, check this properly
-            logger.error("JSONRPCException.", method='tools.check_tx_in_chain', error=e.error)
+            logger.error("JSONRPCException.", method="tools.check_tx_in_chain", error=e.error)

     return tx_in_chain, confirmations
@@ -60,11 +61,11 @@ def in_correct_network(network):

     genesis_block_hash = bitcoin_cli().getblockhash(0)

-    if network == 'mainnet' and genesis_block_hash == mainnet_genesis_block_hash:
+    if network == "mainnet" and genesis_block_hash == mainnet_genesis_block_hash:
         correct_network = True
-    elif network == 'testnet' and genesis_block_hash == testnet3_genesis_block_hash:
+    elif network == "testnet" and genesis_block_hash == testnet3_genesis_block_hash:
         correct_network = True
-    elif network == 'regtest' and genesis_block_hash not in [mainnet_genesis_block_hash, testnet3_genesis_block_hash]:
+    elif network == "regtest" and genesis_block_hash not in [mainnet_genesis_block_hash, testnet3_genesis_block_hash]:
         correct_network = True

     return correct_network
@@ -72,5 +73,4 @@ def in_correct_network(network):

 def check_txid_format(txid):
     # TODO: #12-check-txid-regexp
-    return isinstance(txid, str) and re.search(r'^[0-9A-Fa-f]{64}$', txid) is not None
-
+    return isinstance(txid, str) and re.search(r"^[0-9A-Fa-f]{64}$", txid) is not None
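A hedged usage sketch for bitcoin_cli() above: AuthServiceProxy turns attribute access into JSON-RPC calls against bitcoind, so standard RPC methods can be invoked directly (module path pisa.tools is assumed from the method= labels in the log calls; credentials come from pisa.conf):

    from pisa.tools import bitcoin_cli
    from pisa.utils.auth_proxy import JSONRPCException

    try:
        print(bitcoin_cli().getblockcount())   # each attribute becomes an RPC method name
        print(bitcoin_cli().getblockhash(0))   # genesis hash, as in_correct_network uses it
    except JSONRPCException as e:
        print("RPC error:", e.error.get("code"), e.error.get("message"))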
@@ -57,9 +57,9 @@ log = logging.getLogger("BitcoinRPC")
 class JSONRPCException(Exception):
     def __init__(self, rpc_error, http_status=None):
         try:
-            errmsg = '%(message)s (%(code)i)' % rpc_error
+            errmsg = "%(message)s (%(code)i)" % rpc_error
         except (KeyError, TypeError):
-            errmsg = ''
+            errmsg = ""
         super().__init__(errmsg)
         self.error = rpc_error
         self.http_status = http_status
@@ -71,7 +71,7 @@ def EncodeDecimal(o):
     raise TypeError(repr(o) + " is not JSON serializable")


-class AuthServiceProxy():
+class AuthServiceProxy:
     __id_count = 0

     # ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
@@ -80,15 +80,15 @@ class AuthServiceProxy():
         self._service_name = service_name
         self.ensure_ascii = ensure_ascii  # can be toggled on the fly by tests
         self.__url = urllib.parse.urlparse(service_url)
-        user = None if self.__url.username is None else self.__url.username.encode('utf8')
-        passwd = None if self.__url.password is None else self.__url.password.encode('utf8')
-        authpair = user + b':' + passwd
-        self.__auth_header = b'Basic ' + base64.b64encode(authpair)
+        user = None if self.__url.username is None else self.__url.username.encode("utf8")
+        passwd = None if self.__url.password is None else self.__url.password.encode("utf8")
+        authpair = user + b":" + passwd
+        self.__auth_header = b"Basic " + base64.b64encode(authpair)
         self.timeout = timeout
         self._set_conn(connection)

     def __getattr__(self, name):
-        if name.startswith('__') and name.endswith('__'):
+        if name.startswith("__") and name.endswith("__"):
             # Python internal stuff
             raise AttributeError
         if self._service_name is not None:
@@ -96,15 +96,17 @@ class AuthServiceProxy():
         return AuthServiceProxy(self.__service_url, name, connection=self.__conn)

     def _request(self, method, path, postdata):
-        '''
+        """
         Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
         This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
-        '''
-        headers = {'Host': self.__url.hostname,
-                   'User-Agent': USER_AGENT,
-                   'Authorization': self.__auth_header,
-                   'Content-type': 'application/json'}
-        if os.name == 'nt':
+        """
+        headers = {
+            "Host": self.__url.hostname,
+            "User-Agent": USER_AGENT,
+            "Authorization": self.__auth_header,
+            "Content-type": "application/json",
+        }
+        if os.name == "nt":
             # Windows somehow does not like to re-use connections
             # TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows
             self._set_conn()
@@ -128,39 +130,40 @@ class AuthServiceProxy():
     def get_request(self, *args, **argsn):
         AuthServiceProxy.__id_count += 1

-        log.debug("-{}-> {} {}".format(
-            AuthServiceProxy.__id_count,
-            self._service_name,
-            json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
-        ))
+        log.debug(
+            "-{}-> {} {}".format(
+                AuthServiceProxy.__id_count,
+                self._service_name,
+                json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
+            )
+        )
         if args and argsn:
-            raise ValueError('Cannot handle both named and positional arguments')
-        return {'version': '1.1',
-                'method': self._service_name,
-                'params': args or argsn,
-                'id': AuthServiceProxy.__id_count}
+            raise ValueError("Cannot handle both named and positional arguments")
+        return {
+            "version": "1.1",
+            "method": self._service_name,
+            "params": args or argsn,
+            "id": AuthServiceProxy.__id_count,
+        }

     def __call__(self, *args, **argsn):
         postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
-        response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
-        if response['error'] is not None:
-            raise JSONRPCException(response['error'], status)
-        elif 'result' not in response:
-            raise JSONRPCException({
-                'code': -343, 'message': 'missing JSON-RPC result'}, status)
+        response, status = self._request("POST", self.__url.path, postdata.encode("utf-8"))
+        if response["error"] is not None:
+            raise JSONRPCException(response["error"], status)
+        elif "result" not in response:
+            raise JSONRPCException({"code": -343, "message": "missing JSON-RPC result"}, status)
         elif status != HTTPStatus.OK:
-            raise JSONRPCException({
-                'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
+            raise JSONRPCException({"code": -342, "message": "non-200 HTTP status code but no JSON-RPC error"}, status)
         else:
-            return response['result']
+            return response["result"]

     def batch(self, rpc_call_list):
         postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
         log.debug("--> " + postdata)
-        response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
+        response, status = self._request("POST", self.__url.path, postdata.encode("utf-8"))
         if status != HTTPStatus.OK:
-            raise JSONRPCException({
-                'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
+            raise JSONRPCException({"code": -342, "message": "non-200 HTTP status code but no JSON-RPC error"}, status)
         return response

     def _get_response(self):
@@ -168,44 +171,55 @@ class AuthServiceProxy():
         try:
             http_response = self.__conn.getresponse()
         except socket.timeout:
-            raise JSONRPCException({
-                'code': -344,
-                'message': '%r RPC took longer than %f seconds. Consider '
-                           'using larger timeout for calls that take '
-                           'longer to return.' % (self._service_name,
-                                                  self.__conn.timeout)})
+            raise JSONRPCException(
+                {
+                    "code": -344,
+                    "message": "%r RPC took longer than %f seconds. Consider "
+                    "using larger timeout for calls that take "
+                    "longer to return." % (self._service_name, self.__conn.timeout),
+                }
+            )
         if http_response is None:
-            raise JSONRPCException({
-                'code': -342, 'message': 'missing HTTP response from server'})
+            raise JSONRPCException({"code": -342, "message": "missing HTTP response from server"})

-        content_type = http_response.getheader('Content-Type')
-        if content_type != 'application/json':
-            raise JSONRPCException(
-                {'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (
-                    http_response.status, http_response.reason)},
-                http_response.status)
+        content_type = http_response.getheader("Content-Type")
+        if content_type != "application/json":
+            raise JSONRPCException(
+                {
+                    "code": -342,
+                    "message": "non-JSON HTTP response with '%i %s' from server"
+                    % (http_response.status, http_response.reason),
+                },
+                http_response.status,
+            )

-        responsedata = http_response.read().decode('utf8')
+        responsedata = http_response.read().decode("utf8")
         response = json.loads(responsedata, parse_float=decimal.Decimal)
         elapsed = time.time() - req_start_time
         if "error" in response and response["error"] is None:
-            log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed,
-                                           json.dumps(response["result"], default=EncodeDecimal,
-                                                      ensure_ascii=self.ensure_ascii)))
+            log.debug(
+                "<-%s- [%.6f] %s"
+                % (
+                    response["id"],
+                    elapsed,
+                    json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
+                )
+            )
         else:
             log.debug("<-- [%.6f] %s" % (elapsed, responsedata))
         return response, http_response.status

     def __truediv__(self, relative_uri):
-        return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name,
-                                connection=self.__conn)
+        return AuthServiceProxy(
+            "{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn
+        )

     def _set_conn(self, connection=None):
         port = 80 if self.__url.port is None else self.__url.port
         if connection:
             self.__conn = connection
             self.timeout = connection.timeout
-        elif self.__url.scheme == 'https':
+        elif self.__url.scheme == "https":
             self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=self.timeout)
         else:
             self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=self.timeout)
@@ -7,6 +7,7 @@ from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT

 # ToDo: #7-add-async-back-to-zmq
 class ZMQHandler:
     """ Adapted from https://github.com/bitcoin/bitcoin/blob/master/contrib/zmq/zmq_sub.py"""
+
     def __init__(self, parent):
         self.zmqContext = zmq.Context()
         self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
@@ -27,7 +28,7 @@ class ZMQHandler:
         body = msg[1]

         if topic == b"hashblock":
-            block_hash = binascii.hexlify(body).decode('UTF-8')
+            block_hash = binascii.hexlify(body).decode("UTF-8")
             block_queue.put(block_hash)

             self.logger.info("New block received via ZMQ", block_hash=block_hash)
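A minimal standalone sketch of what ZMQHandler.handle does: subscribe to bitcoind's zmqpubhashblock feed and queue each announced block hash. The endpoint below is a placeholder; PISA builds it from FEED_PROTOCOL/FEED_ADDR/FEED_PORT in pisa.conf:

    import binascii
    import queue
    import zmq

    ctx = zmq.Context()
    sock = ctx.socket(zmq.SUB)
    sock.setsockopt(zmq.SUBSCRIBE, b"hashblock")
    sock.connect("tcp://127.0.0.1:28332")  # hypothetical bitcoind -zmqpubhashblock endpoint

    block_queue = queue.Queue()
    topic, body, _seq = sock.recv_multipart()  # bitcoind sends [topic, body, sequence]
    if topic == b"hashblock":
        block_queue.put(binascii.hexlify(body).decode("UTF-8"))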
@@ -101,15 +101,19 @@ class Watcher:
             block = BlockProcessor.get_block(block_hash)

             if block is not None:
-                txids = block.get('tx')
+                txids = block.get("tx")

                 logger.info("List of transactions.", txids=txids)

-                expired_appointments = [uuid for uuid, appointment in self.appointments.items()
-                                        if block["height"] > appointment.end_time + EXPIRY_DELTA]
+                expired_appointments = [
+                    uuid
+                    for uuid, appointment in self.appointments.items()
+                    if block["height"] > appointment.end_time + EXPIRY_DELTA
+                ]

-                Cleaner.delete_expired_appointment(expired_appointments, self.appointments, self.locator_uuid_map,
-                                                   self.db_manager)
+                Cleaner.delete_expired_appointment(
+                    expired_appointments, self.appointments, self.locator_uuid_map, self.db_manager
+                )

                 potential_matches = BlockProcessor.get_potential_matches(txids, self.locator_uuid_map)
                 matches = BlockProcessor.get_matches(potential_matches, self.locator_uuid_map, self.appointments)
@@ -117,11 +121,21 @@ class Watcher:
                 for locator, uuid, dispute_txid, justice_txid, justice_rawtx in matches:
                     # Errors decrypting the Blob will result in a None justice_txid
                     if justice_txid is not None:
-                        logger.info("Notifying responder and deleting appointment.", justice_txid=justice_txid,
-                                    locator=locator, uuid=uuid)
+                        logger.info(
+                            "Notifying responder and deleting appointment.",
+                            justice_txid=justice_txid,
+                            locator=locator,
+                            uuid=uuid,
+                        )

-                        self.responder.add_response(uuid, dispute_txid, justice_txid, justice_rawtx,
-                                                    self.appointments[uuid].end_time, block_hash)
+                        self.responder.add_response(
+                            uuid,
+                            dispute_txid,
+                            justice_txid,
+                            justice_rawtx,
+                            self.appointments[uuid].end_time,
+                            block_hash,
+                        )

                     # Delete the appointment
                     appointment = self.appointments.pop(uuid)