Reformats code to match code guidelines

Sergi Delgado Segura
2019-11-07 15:23:43 +00:00
parent b16775a4aa
commit 20f296c9d4
42 changed files with 770 additions and 587 deletions
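Every hunk below follows the same pattern, consistent with running the Black formatter at an extended line length (roughly 120 characters): single-quoted strings become double-quoted, long calls and literals are exploded to one item per line with trailing commas, backslash continuations become parenthesized blocks, and inline comments get two spaces before the hash. A minimal sketch of the convention (the values are invented, not taken from the diff):

# Before: single quotes, packed one-line dict running past the line limit.
appointment = {'locator': 'deadbeef', 'start_time': 100, 'end_time': 110, 'dispute_delta': 20}

# After: double quotes, exploded literal, trailing comma.
appointment = {
    "locator": "deadbeef",
    "start_time": 100,
    "end_time": 110,
    "dispute_delta": 20,
}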

View File

@@ -2,12 +2,12 @@ import logging
from apps.cli.logger import Logger
# PISA-SERVER
DEFAULT_PISA_API_SERVER = 'btc.pisa.watch'
DEFAULT_PISA_API_SERVER = "btc.pisa.watch"
DEFAULT_PISA_API_PORT = 9814
# PISA-CLI
CLIENT_LOG_FILE = 'pisa-cli.log'
APPOINTMENTS_FOLDER_NAME = 'appointments'
CLIENT_LOG_FILE = "pisa-cli.log"
APPOINTMENTS_FOLDER_NAME = "appointments"
# CRYPTO
SUPPORTED_HASH_FUNCTIONS = ["SHA256"]
@@ -16,9 +16,8 @@ SUPPORTED_CIPHERS = ["AES-GCM-128"]
PISA_PUBLIC_KEY = "pisa_pk.pem"
# Configure logging
logging.basicConfig(format='%(message)s', level=logging.INFO, handlers=[
logging.FileHandler(CLIENT_LOG_FILE),
logging.StreamHandler()
])
logging.basicConfig(
format="%(message)s", level=logging.INFO, handlers=[logging.FileHandler(CLIENT_LOG_FILE), logging.StreamHandler()]
)
logger = Logger("Client")

View File

@@ -9,7 +9,7 @@ from apps.cli import logger
class Blob:
def __init__(self, data, cipher, hash_function):
if type(data) is not str or re.search(r'^[0-9A-Fa-f]+$', data) is None:
if type(data) is not str or re.search(r"^[0-9A-Fa-f]+$", data) is None:
raise ValueError("Non-Hex character found in txid.")
self.data = data
@@ -18,19 +18,23 @@ class Blob:
# FIXME: We only support SHA256 for now
if self.hash_function.upper() not in SUPPORTED_HASH_FUNCTIONS:
raise ValueError("Hash function not supported ({}). Supported Hash functions: {}"
.format(self.hash_function, SUPPORTED_HASH_FUNCTIONS))
raise ValueError(
"Hash function not supported ({}). Supported Hash functions: {}".format(
self.hash_function, SUPPORTED_HASH_FUNCTIONS
)
)
# FIXME: We only support AES-GCM-128 for now
if self.cipher.upper() not in SUPPORTED_CIPHERS:
raise ValueError("Cipher not supported ({}). Supported ciphers: {}".format(self.hash_function,
SUPPORTED_CIPHERS))
raise ValueError(
"Cipher not supported ({}). Supported ciphers: {}".format(self.hash_function, SUPPORTED_CIPHERS)
)
def encrypt(self, tx_id):
if len(tx_id) != 64:
raise ValueError("txid does not matches the expected size (32-byte / 64 hex chars).")
elif re.search(r'^[0-9A-Fa-f]+$', tx_id) is None:
elif re.search(r"^[0-9A-Fa-f]+$", tx_id) is None:
raise ValueError("Non-Hex character found in txid.")
# Transaction to be encrypted
@@ -50,10 +54,12 @@ class Blob:
encrypted_blob = aesgcm.encrypt(nonce=nonce, data=tx, associated_data=None)
encrypted_blob = hexlify(encrypted_blob).decode()
logger.info("Creating new blob",
master_key=hexlify(master_key).decode(),
sk=hexlify(sk).decode(),
nonce=hexlify(nonce).decode(),
encrypted_blob=encrypted_blob)
logger.info(
"Creating new blob",
master_key=hexlify(master_key).decode(),
sk=hexlify(sk).decode(),
nonce=hexlify(nonce).decode(),
encrypted_blob=encrypted_blob,
)
return encrypted_blob

View File

@@ -1,21 +1,25 @@
def help_add_appointment():
return "NAME:" \
"\tpython pisa-cli add_appointment - Registers a json formatted appointment to the PISA server." \
"\n\nUSAGE:" \
"\tpython pisa-cli add_appointment [command options] appointment/path_to_appointment_file" \
"\n\nDESCRIPTION:" \
"\n\n\tRegisters a json formatted appointment to the PISA server." \
"\n\tif -f, --file *is* specified, then the command expects a path to a json file instead of a json encoded " \
"\n\tstring as parameter." \
"\n\nOPTIONS:" \
"\n\t -f, --file path_to_json_file\t loads the appointment data from the specified json file instead of" \
return (
"NAME:"
"\tpython pisa-cli add_appointment - Registers a json formatted appointment to the PISA server."
"\n\nUSAGE:"
"\tpython pisa-cli add_appointment [command options] appointment/path_to_appointment_file"
"\n\nDESCRIPTION:"
"\n\n\tRegisters a json formatted appointment to the PISA server."
"\n\tif -f, --file *is* specified, then the command expects a path to a json file instead of a json encoded "
"\n\tstring as parameter."
"\n\nOPTIONS:"
"\n\t -f, --file path_to_json_file\t loads the appointment data from the specified json file instead of"
"\n\t\t\t\t\t command line"
)
def help_get_appointment():
return "NAME:" \
"\tpython pisa-cli get_appointment - Gets json formatted data about an appointment from the PISA server." \
"\n\nUSAGE:" \
"\tpython pisa-cli get_appointment appointment_locator" \
"\n\nDESCRIPTION:" \
"\n\n\tGets json formatted data about an appointment from the PISA server.\n"
return (
"NAME:"
"\tpython pisa-cli get_appointment - Gets json formatted data about an appointment from the PISA server."
"\n\nUSAGE:"
"\tpython pisa-cli get_appointment appointment_locator"
"\n\nDESCRIPTION:"
"\n\n\tGets json formatted data about an appointment from the PISA server.\n"
)

View File

@@ -11,7 +11,6 @@ from getopt import getopt, GetoptError
from requests import ConnectTimeout, ConnectionError
from uuid import uuid4
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.serialization import load_pem_public_key
@@ -34,14 +33,19 @@ def generate_dummy_appointment():
current_height = r.json().get("block_count")
dummy_appointment_data = {"tx": os.urandom(192).hex(), "tx_id": os.urandom(32).hex(),
"start_time": current_height + 5, "end_time": current_height + 10, "dispute_delta": 20}
dummy_appointment_data = {
"tx": os.urandom(192).hex(),
"tx_id": os.urandom(32).hex(),
"start_time": current_height + 5,
"end_time": current_height + 10,
"dispute_delta": 20,
}
print('Generating dummy appointment data:''\n\n' + json.dumps(dummy_appointment_data, indent=4, sort_keys=True))
print("Generating dummy appointment data:" "\n\n" + json.dumps(dummy_appointment_data, indent=4, sort_keys=True))
json.dump(dummy_appointment_data, open('dummy_appointment_data.json', 'w'))
json.dump(dummy_appointment_data, open("dummy_appointment_data.json", "w"))
print('\nData stored in dummy_appointment_data.json')
print("\nData stored in dummy_appointment_data.json")
# Loads and returns Pisa's public key from disk.
@@ -53,6 +57,7 @@ def load_pisa_public_key():
pubkey_pem = key_file.read().encode("utf-8")
pisa_public_key = load_pem_public_key(pubkey_pem, backend=default_backend())
return pisa_public_key
except UnsupportedAlgorithm:
raise ValueError("Could not deserialize the public key (unsupported algorithm).")
@@ -61,10 +66,11 @@ def load_pisa_public_key():
# returning True or False accordingly.
def is_appointment_signature_valid(appointment, signature, pk):
try:
sig_bytes = unhexlify(signature.encode('utf-8'))
data = json.dumps(appointment, sort_keys=True, separators=(',', ':')).encode("utf-8")
sig_bytes = unhexlify(signature.encode("utf-8"))
data = json.dumps(appointment, sort_keys=True, separators=(",", ":")).encode("utf-8")
pk.verify(sig_bytes, data, ec.ECDSA(hashes.SHA256()))
return True
except InvalidSignature:
return False
@@ -75,8 +81,9 @@ def save_signed_appointment(appointment, signature):
os.makedirs(APPOINTMENTS_FOLDER_NAME, exist_ok=True)
timestamp = int(time.time())
locator = appointment['locator']
locator = appointment["locator"]
uuid = uuid4().hex # prevent filename collisions
filename = "{}/appointment-{}-{}-{}.json".format(APPOINTMENTS_FOLDER_NAME, timestamp, locator, uuid)
data = {"appointment": appointment, "signature": signature}
@@ -95,10 +102,10 @@ def add_appointment(args):
arg_opt = args.pop(0)
try:
if arg_opt in ['-h', '--help']:
if arg_opt in ["-h", "--help"]:
sys.exit(help_add_appointment())
if arg_opt in ['-f', '--file']:
if arg_opt in ["-f", "--file"]:
fin = args.pop(0)
if not os.path.isfile(fin):
logger.error("Can't find file " + fin)
@@ -107,6 +114,7 @@ def add_appointment(args):
try:
with open(fin) as f:
appointment_data = json.load(f)
except IOError as e:
logger.error("I/O error({}): {}".format(e.errno, e.strerror))
return False
@@ -121,17 +129,21 @@ def add_appointment(args):
logger.error("The provided JSON is empty.")
return False
valid_locator = check_txid_format(appointment_data.get('tx_id'))
valid_locator = check_txid_format(appointment_data.get("tx_id"))
if not valid_locator:
logger.error("The provided locator is not valid.")
return False
add_appointment_endpoint = "http://{}:{}".format(pisa_api_server, pisa_api_port)
appointment = build_appointment(appointment_data.get('tx'), appointment_data.get('tx_id'),
appointment_data.get('start_time'), appointment_data.get('end_time'),
appointment_data.get('dispute_delta'))
appointment_json = json.dumps(appointment, sort_keys=True, separators=(',', ':'))
appointment = build_appointment(
appointment_data.get("tx"),
appointment_data.get("tx_id"),
appointment_data.get("start_time"),
appointment_data.get("end_time"),
appointment_data.get("dispute_delta"),
)
appointment_json = json.dumps(appointment, sort_keys=True, separators=(",", ":"))
logger.info("Sending appointment to PISA")
@@ -153,30 +165,33 @@ def add_appointment(args):
return False
if r.status_code != HTTP_OK:
if 'error' not in response_json:
logger.error("The server returned status code {}, but no error description."
.format(r.status_code))
if "error" not in response_json:
logger.error("The server returned status code {}, but no error description.".format(r.status_code))
else:
error = response_json['error']
logger.error("The server returned status code {}, and the following error: {}."
.format(r.status_code, error))
error = response_json["error"]
logger.error(
"The server returned status code {}, and the following error: {}.".format(r.status_code, error)
)
return False
if 'signature' not in response_json:
if "signature" not in response_json:
logger.error("The response does not contain the signature of the appointment.")
return False
signature = response_json['signature']
signature = response_json["signature"]
# verify that the returned signature is valid
try:
pk = load_pisa_public_key()
is_sig_valid = is_appointment_signature_valid(appointment, signature, pk)
except ValueError:
logger.error("Failed to deserialize the public key. It might be in an unsupported format.")
return False
except FileNotFoundError:
logger.error("Pisa's public key file not found. Please check your settings.")
return False
except IOError as e:
logger.error("I/O error({}): {}".format(e.errno, e.strerror))
return False
@@ -189,6 +204,7 @@ def add_appointment(args):
# all good, store appointment and signature
try:
save_signed_appointment(appointment, signature)
except OSError as e:
logger.error("There was an error while saving the appointment: {}".format(e))
return False
@@ -203,7 +219,7 @@ def get_appointment(args):
arg_opt = args.pop(0)
if arg_opt in ['-h', '--help']:
if arg_opt in ["-h", "--help"]:
sys.exit(help_get_appointment())
else:
locator = arg_opt
@@ -215,6 +231,7 @@ def get_appointment(args):
get_appointment_endpoint = "http://{}:{}/get_appointment".format(pisa_api_server, pisa_api_port)
parameters = "?locator={}".format(locator)
try:
r = requests.get(url=get_appointment_endpoint + parameters, timeout=5)
@@ -241,8 +258,14 @@ def build_appointment(tx, tx_id, start_time, end_time, dispute_delta):
encrypted_blob = blob.encrypt(tx_id)
appointment = {
'locator': locator, 'start_time': start_time, 'end_time': end_time, 'dispute_delta': dispute_delta,
'encrypted_blob': encrypted_blob, 'cipher': cipher, 'hash_function': hash_function}
"locator": locator,
"start_time": start_time,
"end_time": end_time,
"dispute_delta": dispute_delta,
"encrypted_blob": encrypted_blob,
"cipher": cipher,
"hash_function": hash_function,
}
return appointment
@@ -252,61 +275,62 @@ def check_txid_format(txid):
sys.exit("locator does not matches the expected size (32-byte / 64 hex chars).")
# TODO: #12-check-txid-regexp
return re.search(r'^[0-9A-Fa-f]+$', txid) is not None
return re.search(r"^[0-9A-Fa-f]+$", txid) is not None
def show_usage():
return ('USAGE: '
'\n\tpython pisa-cli.py [global options] command [command options] [arguments]'
'\n\nCOMMANDS:'
'\n\tadd_appointment \tRegisters a json formatted appointment to the PISA server.'
'\n\tget_appointment \tGets json formatted data about an appointment from the PISA server.'
'\n\thelp \t\t\tShows a list of commands or help for a specific command.'
'\n\nGLOBAL OPTIONS:'
'\n\t-s, --server \tAPI server where to send the requests. Defaults to btc.pisa.watch (modifiable in '
'__init__.py)'
'\n\t-p, --port \tAPI port where to send the requests. Defaults to 9814 (modifiable in __init__.py)'
'\n\t-d, --debug \tshows debug information and stores it in pisa.log'
'\n\t-h --help \tshows this message.')
return (
"USAGE: "
"\n\tpython pisa-cli.py [global options] command [command options] [arguments]"
"\n\nCOMMANDS:"
"\n\tadd_appointment \tRegisters a json formatted appointment to the PISA server."
"\n\tget_appointment \tGets json formatted data about an appointment from the PISA server."
"\n\thelp \t\t\tShows a list of commands or help for a specific command."
"\n\nGLOBAL OPTIONS:"
"\n\t-s, --server \tAPI server where to send the requests. Defaults to btc.pisa.watch (modifiable in "
"__init__.py)"
"\n\t-p, --port \tAPI port where to send the requests. Defaults to 9814 (modifiable in __init__.py)"
"\n\t-d, --debug \tshows debug information and stores it in pisa.log"
"\n\t-h --help \tshows this message."
)
if __name__ == '__main__':
if __name__ == "__main__":
pisa_api_server = DEFAULT_PISA_API_SERVER
pisa_api_port = DEFAULT_PISA_API_PORT
commands = ['add_appointment', 'get_appointment', 'help']
testing_commands = ['generate_dummy_appointment']
commands = ["add_appointment", "get_appointment", "help"]
testing_commands = ["generate_dummy_appointment"]
try:
opts, args = getopt(argv[1:], 's:p:h', ['server', 'port', 'help'])
opts, args = getopt(argv[1:], "s:p:h", ["server", "port", "help"])
for opt, arg in opts:
if opt in ['-s', 'server']:
if opt in ["-s", "server"]:
if arg:
pisa_api_server = arg
if opt in ['-p', '--port']:
if opt in ["-p", "--port"]:
if arg:
pisa_api_port = int(arg)
if opt in ['-h', '--help']:
if opt in ["-h", "--help"]:
sys.exit(show_usage())
if args:
command = args.pop(0)
if command in commands:
if command == 'add_appointment':
if command == "add_appointment":
add_appointment(args)
elif command == 'get_appointment':
elif command == "get_appointment":
get_appointment(args)
elif command == 'help':
elif command == "help":
if args:
command = args.pop(0)
if command == 'add_appointment':
if command == "add_appointment":
sys.exit(help_add_appointment())
elif command == "get_appointment":
@@ -320,7 +344,7 @@ if __name__ == '__main__':
# FIXME: testing command, not for production
elif command in testing_commands:
if command == 'generate_dummy_appointment':
if command == "generate_dummy_appointment":
generate_dummy_appointment()
else:
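Note that both client and server serialize appointments with json.dumps(..., sort_keys=True, separators=(",", ":")) before signing or verifying, so the signed bytes are canonical and reproducible on both ends. A hedged sketch of the round trip, mirroring the cryptography calls visible in this diff (the key and appointment contents are invented):

import json

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

sk = ec.generate_private_key(ec.SECP256K1(), default_backend())
pk = sk.public_key()

appointment = {"locator": "00" * 32, "start_time": 100, "end_time": 110}
data = json.dumps(appointment, sort_keys=True, separators=(",", ":")).encode("utf-8")

signature = sk.sign(data, ec.ECDSA(hashes.SHA256()))
pk.verify(signature, data, ec.ECDSA(hashes.SHA256()))  # raises InvalidSignature on mismatch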

View File

@@ -9,39 +9,35 @@ from cryptography.hazmat.primitives.asymmetric import ec
# Simple tool to generate an ECDSA private key using the secp256k1 curve and save private and public keys
# as 'pisa_sk.pem' 'and pisa_pk.pem', respectively.
SK_FILE_NAME = 'pisa_sk.pem'
PK_FILE_NAME = 'pisa_pk.pem'
SK_FILE_NAME = "pisa_sk.pem"
PK_FILE_NAME = "pisa_pk.pem"
def save_sk(sk, filename):
pem = sk.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
encryption_algorithm=serialization.NoEncryption(),
)
with open(filename, 'wb') as pem_out:
with open(filename, "wb") as pem_out:
pem_out.write(pem)
def save_pk(pk, filename):
pem = pk.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo
)
with open(filename, 'wb') as pem_out:
pem = pk.public_bytes(encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo)
with open(filename, "wb") as pem_out:
pem_out.write(pem)
if __name__ == '__main__':
if __name__ == "__main__":
if os.path.exists(SK_FILE_NAME):
print("A key with name \"{}\" already exists. Aborting.".format(SK_FILE_NAME))
print('A key with name "{}" already exists. Aborting.'.format(SK_FILE_NAME))
exit(1)
sk = ec.generate_private_key(
ec.SECP256K1, default_backend()
)
sk = ec.generate_private_key(ec.SECP256K1, default_backend())
pk = sk.public_key()
save_sk(sk, SK_FILE_NAME)
save_pk(pk, PK_FILE_NAME)
print("Saved private key \"{}\" and public key \"{}\".".format(SK_FILE_NAME, PK_FILE_NAME))
print('Saved private key "{}" and public key "{}".'.format(SK_FILE_NAME, PK_FILE_NAME))

View File

@@ -3,25 +3,25 @@ import logging
from pisa.utils.auth_proxy import AuthServiceProxy
import pisa.conf as conf
HOST = 'localhost'
HOST = "localhost"
PORT = 9814
# Create the file logger
f_logger = logging.getLogger('pisa_file_log')
f_logger = logging.getLogger("pisa_file_log")
f_logger.setLevel(logging.INFO)
fh = logging.FileHandler(conf.SERVER_LOG_FILE)
fh.setLevel(logging.INFO)
fh_formatter = logging.Formatter('%(message)s')
fh_formatter = logging.Formatter("%(message)s")
fh.setFormatter(fh_formatter)
f_logger.addHandler(fh)
# Create the console logger
c_logger = logging.getLogger('pisa_console_log')
c_logger = logging.getLogger("pisa_console_log")
c_logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch_formatter = logging.Formatter('%(asctime)s %(message)s', '%Y-%m-%d %H:%M:%S')
ch_formatter = logging.Formatter("%(asctime)s %(message)s", "%Y-%m-%d %H:%M:%S")
ch.setFormatter(ch_formatter)
c_logger.addHandler(ch)

View File

@@ -22,12 +22,12 @@ logger = Logger("API")
watcher = None
@app.route('/', methods=['POST'])
@app.route("/", methods=["POST"])
def add_appointment():
remote_addr = request.environ.get('REMOTE_ADDR')
remote_port = request.environ.get('REMOTE_PORT')
remote_addr = request.environ.get("REMOTE_ADDR")
remote_port = request.environ.get("REMOTE_PORT")
logger.info('Connection accepted', from_addr_port='{}:{}'.format(remote_addr, remote_port))
logger.info("Connection accepted", from_addr_port="{}:{}".format(remote_addr, remote_port))
# Check content type once if properly defined
request_data = json.loads(request.get_json())
@@ -42,7 +42,7 @@ def add_appointment():
if appointment_added:
rcode = HTTP_OK
response = {"locator": appointment.locator, "signature": hexlify(signature).decode('utf-8')}
response = {"locator": appointment.locator, "signature": hexlify(signature).decode("utf-8")}
else:
rcode = HTTP_SERVICE_UNAVAILABLE
error = "appointment rejected"
@@ -56,8 +56,12 @@ def add_appointment():
rcode = HTTP_BAD_REQUEST
error = "appointment rejected. Request does not match the standard"
logger.info('Sending response and disconnecting',
from_addr_port='{}:{}'.format(remote_addr, remote_port), response=response, error=error)
logger.info(
"Sending response and disconnecting",
from_addr_port="{}:{}".format(remote_addr, remote_port),
response=response,
error=error,
)
if error is None:
return jsonify(response), rcode
@@ -67,9 +71,9 @@ def add_appointment():
# FIXME: THE NEXT THREE API ENDPOINTS ARE FOR TESTING AND SHOULD BE REMOVED / PROPERLY MANAGED BEFORE PRODUCTION!
# ToDo: #17-add-api-keys
@app.route('/get_appointment', methods=['GET'])
@app.route("/get_appointment", methods=["GET"])
def get_appointment():
locator = request.args.get('locator')
locator = request.args.get("locator")
response = []
# ToDo: #15-add-system-monitor
@@ -79,7 +83,7 @@ def get_appointment():
if appointment_in_watcher:
for uuid in appointment_in_watcher:
appointment_data = watcher.appointments[uuid].to_dict()
appointment_data['status'] = "being_watched"
appointment_data["status"] = "being_watched"
response.append(appointment_data)
if watcher.responder:
@@ -88,7 +92,7 @@ def get_appointment():
for job in responder_jobs.values():
if job.locator == locator:
job_data = job.to_dict()
job_data['status'] = "dispute_responded"
job_data["status"] = "dispute_responded"
response.append(job_data)
if not response:
@@ -99,14 +103,14 @@ def get_appointment():
return response
@app.route('/get_all_appointments', methods=['GET'])
@app.route("/get_all_appointments", methods=["GET"])
def get_all_appointments():
watcher_appointments = {}
responder_jobs = {}
# ToDo: #15-add-system-monitor
if request.remote_addr in request.host or request.remote_addr == '127.0.0.1':
if request.remote_addr in request.host or request.remote_addr == "127.0.0.1":
for uuid, appointment in watcher.appointments.items():
watcher_appointments[uuid] = appointment.to_dict()
@@ -122,7 +126,7 @@ def get_all_appointments():
return response
@app.route('/get_block_count', methods=['GET'])
@app.route("/get_block_count", methods=["GET"])
def get_block_count():
return jsonify({"block_count": BlockProcessor.get_block_count()})
@@ -135,7 +139,7 @@ def start_api(w):
watcher = w
# Setting Flask log to ERROR only so it does not mess with our logging. Also disabling Flask's initial messages
logging.getLogger('werkzeug').setLevel(logging.ERROR)
os.environ['WERKZEUG_RUN_MAIN'] = 'true'
logging.getLogger("werkzeug").setLevel(logging.ERROR)
os.environ["WERKZEUG_RUN_MAIN"] = "true"
app.run(host=HOST, port=PORT)
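Since add_appointment parses the body with json.loads(request.get_json()), the endpoint effectively expects a JSON-encoded string rather than a plain JSON object. A hedged client-side sketch against the defaults in this diff (the appointment fields are invented):

import json

import requests

appointment = {"locator": "00" * 32, "start_time": 100, "end_time": 110}
# The server decodes twice (json.loads(request.get_json())), so the payload
# sent as JSON is itself a JSON string.
r = requests.post("http://btc.pisa.watch:9814/", json=json.dumps(appointment), timeout=5)
print(r.status_code, r.json())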

View File

@@ -6,11 +6,12 @@ from pisa.encrypted_blob import EncryptedBlob
# Basic appointment structure
class Appointment:
# DISCUSS: 35-appointment-checks
def __init__(self, locator, start_time, end_time, dispute_delta, encrypted_blob, cipher, hash_function,
triggered=False):
def __init__(
self, locator, start_time, end_time, dispute_delta, encrypted_blob, cipher, hash_function, triggered=False
):
self.locator = locator
self.start_time = start_time # ToDo: #4-standardize-appointment-fields
self.end_time = end_time # ToDo: #4-standardize-appointment-fields
self.start_time = start_time # ToDo: #4-standardize-appointment-fields
self.end_time = end_time # ToDo: #4-standardize-appointment-fields
self.dispute_delta = dispute_delta
self.encrypted_blob = EncryptedBlob(encrypted_blob)
self.cipher = cipher
@@ -20,36 +21,55 @@ class Appointment:
@classmethod
def from_dict(cls, appointment_data):
locator = appointment_data.get("locator")
start_time = appointment_data.get("start_time") # ToDo: #4-standardize-appointment-fields
end_time = appointment_data.get("end_time") # ToDo: #4-standardize-appointment-fields
start_time = appointment_data.get("start_time") # ToDo: #4-standardize-appointment-fields
end_time = appointment_data.get("end_time") # ToDo: #4-standardize-appointment-fields
dispute_delta = appointment_data.get("dispute_delta")
encrypted_blob_data = appointment_data.get("encrypted_blob")
cipher = appointment_data.get("cipher")
hash_function = appointment_data.get("hash_function")
triggered = appointment_data.get("triggered")
if any(v is None for v in [locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher,
hash_function, triggered]):
if any(
v is None
for v in [
locator,
start_time,
end_time,
dispute_delta,
encrypted_blob_data,
cipher,
hash_function,
triggered,
]
):
raise ValueError("Wrong appointment data, some fields are missing")
else:
appointment = cls(locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function,
triggered)
appointment = cls(
locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function, triggered
)
return appointment
def to_dict(self):
# ToDO: #3-improve-appointment-structure
appointment = {"locator": self.locator, "start_time": self.start_time, "end_time": self.end_time,
"dispute_delta": self.dispute_delta, "encrypted_blob": self.encrypted_blob.data,
"cipher": self.cipher, "hash_function": self.hash_function, "triggered": self.triggered}
appointment = {
"locator": self.locator,
"start_time": self.start_time,
"end_time": self.end_time,
"dispute_delta": self.dispute_delta,
"encrypted_blob": self.encrypted_blob.data,
"cipher": self.cipher,
"hash_function": self.hash_function,
"triggered": self.triggered,
}
return appointment
def to_json(self):
return json.dumps(self.to_dict(), sort_keys=True, separators=(',', ':'))
return json.dumps(self.to_dict(), sort_keys=True, separators=(",", ":"))
def serialize(self):
data = self.to_dict()
data.pop("triggered")
return json.dumps(data, sort_keys=True, separators=(',', ':')).encode("utf-8")
return json.dumps(data, sort_keys=True, separators=(",", ":")).encode("utf-8")

View File

@@ -101,7 +101,7 @@ class BlockProcessor:
try:
# ToDo: #20-test-tx-decrypting-edge-cases
justice_rawtx = appointments[uuid].encrypted_blob.decrypt(dispute_txid)
justice_txid = bitcoin_cli().decoderawtransaction(justice_rawtx).get('txid')
justice_txid = bitcoin_cli().decoderawtransaction(justice_rawtx).get("txid")
logger.info("Match found for locator.", locator=locator, uuid=uuid, justice_txid=justice_txid)
except JSONRPCException as e:

View File

@@ -5,7 +5,6 @@ from pisa.appointment import Appointment
class Builder:
@staticmethod
def build_appointments(appointments_data):
appointments = {}
@@ -48,4 +47,3 @@ class Builder:
block_queue.put(block)
return block_queue

View File

@@ -26,7 +26,7 @@ class Carrier:
receipt = Receipt(delivered=True)
except JSONRPCException as e:
errno = e.error.get('code')
errno = e.error.get("code")
# Since we're pushing a raw transaction to the network we can face several rejections
if errno == RPC_VERIFY_REJECTED:
# DISCUSS: 37-transaction-rejection
@@ -64,7 +64,7 @@ class Carrier:
else:
# If something else happens (unlikely but possible) log it so we can treat it in future releases
logger.error("JSONRPCException.", method='Carrier.send_transaction', error=e.error)
logger.error("JSONRPCException.", method="Carrier.send_transaction", error=e.error)
receipt = Receipt(delivered=False, reason=UNKNOWN_JSON_RPC_EXCEPTION)
return receipt
@@ -79,11 +79,11 @@ class Carrier:
# While it's quite unlikely, the transaction that was already in the blockchain could have been
# reorged while we were querying bitcoind to get the confirmation count. In such a case we just
# restart the job
if e.error.get('code') == RPC_INVALID_ADDRESS_OR_KEY:
if e.error.get("code") == RPC_INVALID_ADDRESS_OR_KEY:
logger.info("Transaction got reorged before obtaining information", txid=txid)
else:
# If something else happens (unlikely but possible) log it so we can treat it in future releases
logger.error("JSONRPCException.", method='Carrier.get_transaction', error=e.error)
logger.error("JSONRPCException.", method="Carrier.get_transaction", error=e.error)
return tx_info

View File

@@ -28,8 +28,12 @@ class Cleaner:
@staticmethod
def delete_completed_jobs(jobs, tx_job_map, completed_jobs, height, db_manager):
for uuid, confirmations in completed_jobs:
logger.info("Job completed. Appointment ended after reaching enough confirmations.",
uuid=uuid, height=height, confirmations=confirmations)
logger.info(
"Job completed. Appointment ended after reaching enough confirmations.",
uuid=uuid,
height=height,
confirmations=confirmations,
)
# ToDo: #9-add-data-persistence
justice_txid = jobs[uuid].justice_txid

View File

@@ -9,7 +9,7 @@ WATCHER_PREFIX = "w"
WATCHER_LAST_BLOCK_KEY = "bw"
RESPONDER_PREFIX = "r"
RESPONDER_LAST_BLOCK_KEY = "br"
LOCATOR_MAP_PREFIX = 'm'
LOCATOR_MAP_PREFIX = "m"
class DBManager:
@@ -21,25 +21,25 @@ class DBManager:
self.db = plyvel.DB(db_path)
except plyvel.Error as e:
if 'create_if_missing is false' in str(e):
if "create_if_missing is false" in str(e):
logger.info("No db found. Creating a fresh one")
self.db = plyvel.DB(db_path, create_if_missing=True)
def load_appointments_db(self, prefix):
data = {}
for k, v in self.db.iterator(prefix=prefix.encode('utf-8')):
for k, v in self.db.iterator(prefix=prefix.encode("utf-8")):
# Get uuid and appointment_data from the db
uuid = k[len(prefix):].decode('utf-8')
uuid = k[len(prefix) :].decode("utf-8")
data[uuid] = json.loads(v)
return data
def get_last_known_block(self, key):
last_block = self.db.get(key.encode('utf-8'))
last_block = self.db.get(key.encode("utf-8"))
if last_block:
last_block = last_block.decode('utf-8')
last_block = last_block.decode("utf-8")
return last_block
@@ -47,23 +47,24 @@ class DBManager:
if isinstance(prefix, str):
key = prefix + key
key = key.encode('utf-8')
value = value.encode('utf-8')
key = key.encode("utf-8")
value = value.encode("utf-8")
self.db.put(key, value)
def delete_entry(self, key, prefix=None):
def delete_entry(self, key, prefix=None):
if isinstance(prefix, str):
key = prefix + key
key = key.encode('utf-8')
key = key.encode("utf-8")
self.db.delete(key)
def load_watcher_appointments(self):
all_appointments = self.load_appointments_db(prefix=WATCHER_PREFIX)
non_triggered_appointments = {uuid: appointment for uuid, appointment in all_appointments.items()
if appointment["triggered"] is False}
non_triggered_appointments = {
uuid: appointment for uuid, appointment in all_appointments.items() if appointment["triggered"] is False
}
return non_triggered_appointments
@@ -79,11 +80,11 @@ class DBManager:
logger.info("Adding appointment to Responder's db", uuid=uuid)
def load_locator_map(self, locator):
key = (LOCATOR_MAP_PREFIX+locator).encode('utf-8')
key = (LOCATOR_MAP_PREFIX + locator).encode("utf-8")
locator_map = self.db.get(key)
if locator_map is not None:
locator_map = json.loads(locator_map.decode('utf-8'))
locator_map = json.loads(locator_map.decode("utf-8"))
else:
logger.info("Locator not found in the db", locator=locator)
@@ -105,8 +106,8 @@ class DBManager:
locator_map = [uuid]
logger.info("Creating new locator map", locator=locator, uuid=uuid)
key = (LOCATOR_MAP_PREFIX + locator).encode('utf-8')
self.db.put(key, json.dumps(locator_map).encode('utf-8'))
key = (LOCATOR_MAP_PREFIX + locator).encode("utf-8")
self.db.put(key, json.dumps(locator_map).encode("utf-8"))
def delete_locator_map(self, locator):
self.delete_entry(locator, prefix=LOCATOR_MAP_PREFIX)

View File

@@ -26,11 +26,13 @@ class EncryptedBlob:
sk = master_key[:16]
nonce = master_key[16:]
logger.info("Creating new blob.",
master_key=hexlify(master_key).decode(),
sk=hexlify(sk).decode(),
nonce=hexlify(sk).decode(),
encrypted_blob=self.data)
logger.info(
"Creating new blob.",
master_key=hexlify(master_key).decode(),
sk=hexlify(sk).decode(),
nonce=hexlify(nonce).decode(),
encrypted_blob=self.data,
)
# Decrypt
aesgcm = AESGCM(sk)
@@ -38,7 +40,7 @@ class EncryptedBlob:
try:
raw_tx = aesgcm.decrypt(nonce=nonce, data=data, associated_data=None)
hex_raw_tx = hexlify(raw_tx).decode('utf8')
hex_raw_tx = hexlify(raw_tx).decode("utf8")
except InvalidTag:
hex_raw_tx = None
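These hunks imply the blob scheme: a 32-byte master key is split into a 16-byte AES-GCM-128 key and a 16-byte nonce. How master_key is derived from the dispute txid is outside this diff, so the SHA256 step below is an assumption; the AESGCM calls mirror the ones shown above:

import hashlib
from binascii import hexlify, unhexlify

from cryptography.hazmat.primitives.ciphers.aead import AESGCM

tx_id = "11" * 32  # hypothetical dispute txid (hex)
master_key = hashlib.sha256(unhexlify(tx_id)).digest()  # assumed derivation
sk, nonce = master_key[:16], master_key[16:]

aesgcm = AESGCM(sk)
encrypted_blob = aesgcm.encrypt(nonce=nonce, data=b"raw tx bytes", associated_data=None)
assert aesgcm.decrypt(nonce=nonce, data=encrypted_blob, associated_data=None) == b"raw tx bytes"
print(hexlify(encrypted_blob).decode())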

View File

@@ -14,5 +14,3 @@ RPC_TX_REORGED_AFTER_BROADCAST = -98
# UNHANDLED
UNKNOWN_JSON_RPC_EXCEPTION = -99

View File

@@ -16,13 +16,13 @@ logger = Logger("Inspector")
class Inspector:
def inspect(self, data):
locator = data.get('locator')
start_time = data.get('start_time')
end_time = data.get('end_time')
dispute_delta = data.get('dispute_delta')
encrypted_blob = data.get('encrypted_blob')
cipher = data.get('cipher')
hash_function = data.get('hash_function')
locator = data.get("locator")
start_time = data.get("start_time")
end_time = data.get("end_time")
dispute_delta = data.get("dispute_delta")
encrypted_blob = data.get("encrypted_blob")
cipher = data.get("cipher")
hash_function = data.get("hash_function")
block_height = BlockProcessor.get_block_count()
@@ -61,14 +61,17 @@ class Inspector:
if locator is None:
rcode = errors.APPOINTMENT_EMPTY_FIELD
message = "empty locator received"
elif type(locator) != str:
rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
message = "wrong locator data type ({})".format(type(locator))
elif len(locator) != 64:
rcode = errors.APPOINTMENT_WRONG_FIELD_SIZE
message = "wrong locator size ({})".format(len(locator))
# TODO: #12-check-txid-regexp
elif re.search(r'^[0-9A-Fa-f]+$', locator) is None:
elif re.search(r"^[0-9A-Fa-f]+$", locator) is None:
rcode = errors.APPOINTMENT_WRONG_FIELD_FORMAT
message = "wrong locator format ({})".format(locator)
@@ -90,9 +93,11 @@ class Inspector:
if start_time is None:
rcode = errors.APPOINTMENT_EMPTY_FIELD
message = "empty start_time received"
elif t != int:
rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
message = "wrong start_time data type ({})".format(t)
elif start_time <= block_height:
rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
if start_time < block_height:
@@ -118,21 +123,24 @@ class Inspector:
if end_time is None:
rcode = errors.APPOINTMENT_EMPTY_FIELD
message = "empty end_time received"
elif t != int:
rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
message = "wrong end_time data type ({})".format(t)
elif start_time >= end_time:
rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
if start_time > end_time:
message = "end_time is smaller than start_time"
else:
message = "end_time is equal to start_time"
elif block_height >= end_time:
rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
if block_height > end_time:
message = 'end_time is in the past'
message = "end_time is in the past"
else:
message = 'end_time is too close to current height'
message = "end_time is too close to current height"
if message is not None:
logger.error(message)
@@ -149,13 +157,16 @@ class Inspector:
if dispute_delta is None:
rcode = errors.APPOINTMENT_EMPTY_FIELD
message = "empty dispute_delta received"
elif t != int:
rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
message = "wrong dispute_delta data type ({})".format(t)
elif dispute_delta < conf.MIN_DISPUTE_DELTA:
rcode = errors.APPOINTMENT_FIELD_TOO_SMALL
message = "dispute delta too small. The dispute delta should be at least {} (current: {})".format(
conf.MIN_DISPUTE_DELTA, dispute_delta)
conf.MIN_DISPUTE_DELTA, dispute_delta
)
if message is not None:
logger.error(message)
@@ -173,10 +184,12 @@ class Inspector:
if encrypted_blob is None:
rcode = errors.APPOINTMENT_EMPTY_FIELD
message = "empty encrypted_blob received"
elif t != str:
rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
message = "wrong encrypted_blob data type ({})".format(t)
elif re.search(r'^[0-9A-Fa-f]+$', encrypted_blob) is None:
elif re.search(r"^[0-9A-Fa-f]+$", encrypted_blob) is None:
rcode = errors.APPOINTMENT_WRONG_FIELD_FORMAT
message = "wrong encrypted_blob format ({})".format(encrypted_blob)
@@ -195,9 +208,11 @@ class Inspector:
if cipher is None:
rcode = errors.APPOINTMENT_EMPTY_FIELD
message = "empty cipher received"
elif t != str:
rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
message = "wrong cipher data type ({})".format(t)
elif cipher.upper() not in conf.SUPPORTED_CIPHERS:
rcode = errors.APPOINTMENT_CIPHER_NOT_SUPPORTED
message = "cipher not supported: {}".format(cipher)
@@ -217,9 +232,11 @@ class Inspector:
if hash_function is None:
rcode = errors.APPOINTMENT_EMPTY_FIELD
message = "empty hash_function received"
elif t != str:
rcode = errors.APPOINTMENT_WRONG_FIELD_TYPE
message = "wrong hash_function data type ({})".format(t)
elif hash_function.upper() not in conf.SUPPORTED_HASH_FUNCTIONS:
rcode = errors.APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED
message = "hash_function not supported {}".format(hash_function)

View File

@@ -24,14 +24,14 @@ def handle_signals(signal_received, frame):
exit(0)
if __name__ == '__main__':
if __name__ == "__main__":
logger.info("Starting PISA")
signal(SIGINT, handle_signals)
signal(SIGTERM, handle_signals)
signal(SIGQUIT, handle_signals)
opts, _ = getopt(argv[1:], '', [''])
opts, _ = getopt(argv[1:], "", [""])
for opt, arg in opts:
# FIXME: Leaving this here for future option/arguments
pass
@@ -62,8 +62,11 @@ if __name__ == '__main__':
last_block_responder = db_manager.load_last_block_hash_responder()
missed_blocks_watcher = block_processor.get_missed_blocks(last_block_watcher)
missed_blocks_responder = missed_blocks_watcher if last_block_watcher == last_block_responder \
missed_blocks_responder = (
missed_blocks_watcher
if last_block_watcher == last_block_responder
else block_processor.get_missed_blocks(last_block_responder)
)
responder = Responder(db_manager)
responder.jobs, responder.tx_job_map = Builder.build_jobs(responder_jobs_data)
@@ -79,4 +82,3 @@ if __name__ == '__main__':
except Exception as e:
logger.error("An error occurred: {}. Shutting down".format(e))
exit(1)

View File

@@ -44,8 +44,13 @@ class Job:
return job
def to_dict(self):
job = {"locator": self.locator, "dispute_txid": self.dispute_txid, "justice_txid": self.justice_txid,
"justice_rawtx": self.justice_rawtx, "appointment_end": self.appointment_end}
job = {
"locator": self.locator,
"dispute_txid": self.dispute_txid,
"justice_txid": self.justice_txid,
"justice_rawtx": self.justice_rawtx,
"appointment_end": self.appointment_end,
}
return job
@@ -113,8 +118,9 @@ class Responder:
self.db_manager.store_responder_job(uuid, job.to_json())
logger.info("New job added.", dispute_txid=dispute_txid, justice_txid=justice_txid,
appointment_end=appointment_end)
logger.info(
"New job added.", dispute_txid=dispute_txid, justice_txid=justice_txid, appointment_end=appointment_end
)
if self.asleep:
self.asleep = False
@@ -124,7 +130,7 @@ class Responder:
responder.start()
def do_subscribe(self):
self.zmq_subscriber = ZMQHandler(parent='Responder')
self.zmq_subscriber = ZMQHandler(parent="Responder")
self.zmq_subscriber.handle(self.block_queue)
def do_watch(self):
@@ -138,16 +144,18 @@ class Responder:
block = BlockProcessor.get_block(block_hash)
if block is not None:
txs = block.get('tx')
height = block.get('height')
txs = block.get("tx")
height = block.get("height")
logger.info("New block received",
block_hash=block_hash, prev_block_hash=block.get('previousblockhash'), txs=txs)
logger.info(
"New block received", block_hash=block_hash, prev_block_hash=block.get("previousblockhash"), txs=txs
)
# ToDo: #9-add-data-persistence
if prev_block_hash == block.get('previousblockhash'):
if prev_block_hash == block.get("previousblockhash"):
self.unconfirmed_txs, self.missed_confirmations = BlockProcessor.check_confirmations(
txs, self.unconfirmed_txs, self.tx_job_map, self.missed_confirmations)
txs, self.unconfirmed_txs, self.tx_job_map, self.missed_confirmations
)
txs_to_rebroadcast = self.get_txs_to_rebroadcast(txs)
completed_jobs = self.get_completed_jobs(height)
@@ -157,8 +165,11 @@ class Responder:
# NOTCOVERED
else:
logger.warning("Reorg found", local_prev_block_hash=prev_block_hash,
remote_prev_block_hash=block.get('previousblockhash'))
logger.warning(
"Reorg found",
local_prev_block_hash=prev_block_hash,
remote_prev_block_hash=block.get("previousblockhash"),
)
# ToDo: #24-properly-handle-reorgs
self.handle_reorgs()
@@ -166,7 +177,7 @@ class Responder:
# Register the last processed block for the responder
self.db_manager.store_last_block_hash_responder(block_hash)
prev_block_hash = block.get('hash')
prev_block_hash = block.get("hash")
# Go back to sleep if there are no more jobs
self.asleep = True
@@ -194,7 +205,7 @@ class Responder:
# FIXME: Should be improved with the librarian
if tx is not None:
confirmations = tx.get('confirmations')
confirmations = tx.get("confirmations")
if confirmations >= MIN_CONFIRMATIONS:
# The end of the appointment has been reached
@@ -213,11 +224,21 @@ class Responder:
for uuid in self.tx_job_map[txid]:
job = self.jobs[uuid]
receipt = self.add_response(uuid, job.dispute_txid, job.justice_txid, job.justice_rawtx,
job.appointment_end, block_hash, retry=True)
receipt = self.add_response(
uuid,
job.dispute_txid,
job.justice_txid,
job.justice_rawtx,
job.appointment_end,
block_hash,
retry=True,
)
logger.warning("Transaction has missed many confirmations. Rebroadcasting.",
justice_txid=job.justice_txid, confirmations_missed=CONFIRMATIONS_BEFORE_RETRY)
logger.warning(
"Transaction has missed many confirmations. Rebroadcasting.",
justice_txid=job.justice_txid,
confirmations_missed=CONFIRMATIONS_BEFORE_RETRY,
)
receipts.append((txid, receipt))
@@ -229,19 +250,22 @@ class Responder:
for uuid, job in self.jobs.items():
# First we check if the dispute transaction is still in the blockchain. If not, the justice can not be
# there either, so we'll need to call the reorg manager straight away
dispute_in_chain, _ = check_tx_in_chain(job.dispute_txid, logger=logger, tx_label='Dispute tx')
dispute_in_chain, _ = check_tx_in_chain(job.dispute_txid, logger=logger, tx_label="Dispute tx")
# If the dispute is there, we can check the justice tx
if dispute_in_chain:
justice_in_chain, justice_confirmations = check_tx_in_chain(job.justice_txid, logger=logger,
tx_label='Justice tx')
justice_in_chain, justice_confirmations = check_tx_in_chain(
job.justice_txid, logger=logger, tx_label="Justice tx"
)
# If both transactions are there, we only need to update the justice tx confirmation count
if justice_in_chain:
logger.info("Updating confirmation count for transaction.",
justice_txid=job.justice_txid,
prev_count=job.confirmations,
curr_count=justice_confirmations)
logger.info(
"Updating confirmation count for transaction.",
justice_txid=job.justice_txid,
prev_count=job.confirmations,
curr_count=justice_confirmations,
)
job.confirmations = justice_confirmations

View File

@@ -1,42 +1,44 @@
# Ported from https://github.com/bitcoin/bitcoin/blob/0.18/src/rpc/protocol.h
# General application defined errors
RPC_MISC_ERROR = -1 # std::exception thrown in command handling
RPC_TYPE_ERROR = -3 # Unexpected type was passed as parameter
RPC_INVALID_ADDRESS_OR_KEY = -5 # Invalid address or key
RPC_OUT_OF_MEMORY = -7 # Ran out of memory during operation
RPC_INVALID_PARAMETER = -8 # Invalid missing or duplicate parameter
RPC_DATABASE_ERROR = -20 # Database error
RPC_DESERIALIZATION_ERROR = -22 # Error parsing or validating structure in raw format
RPC_VERIFY_ERROR = -25 # General error during transaction or block submission
RPC_VERIFY_REJECTED = -26 # Transaction or block was rejected by network rules
RPC_VERIFY_ALREADY_IN_CHAIN = -27 # Transaction already in chain
RPC_IN_WARMUP = -28 # Client still warming up
RPC_METHOD_DEPRECATED = -32 # RPC method is deprecated
RPC_MISC_ERROR = -1 # std::exception thrown in command handling
RPC_TYPE_ERROR = -3 # Unexpected type was passed as parameter
RPC_INVALID_ADDRESS_OR_KEY = -5 # Invalid address or key
RPC_OUT_OF_MEMORY = -7 # Ran out of memory during operation
RPC_INVALID_PARAMETER = -8 # Invalid missing or duplicate parameter
RPC_DATABASE_ERROR = -20 # Database error
RPC_DESERIALIZATION_ERROR = -22 # Error parsing or validating structure in raw format
RPC_VERIFY_ERROR = -25 # General error during transaction or block submission
RPC_VERIFY_REJECTED = -26 # Transaction or block was rejected by network rules
RPC_VERIFY_ALREADY_IN_CHAIN = -27 # Transaction already in chain
RPC_IN_WARMUP = -28 # Client still warming up
RPC_METHOD_DEPRECATED = -32 # RPC method is deprecated
# Aliases for backward compatibility
RPC_TRANSACTION_ERROR = RPC_VERIFY_ERROR
RPC_TRANSACTION_REJECTED = RPC_VERIFY_REJECTED
RPC_TRANSACTION_ALREADY_IN_CHAIN= RPC_VERIFY_ALREADY_IN_CHAIN
RPC_TRANSACTION_ERROR = RPC_VERIFY_ERROR
RPC_TRANSACTION_REJECTED = RPC_VERIFY_REJECTED
RPC_TRANSACTION_ALREADY_IN_CHAIN = RPC_VERIFY_ALREADY_IN_CHAIN
# P2P client errors
RPC_CLIENT_NOT_CONNECTED = -9 # Bitcoin is not connected
RPC_CLIENT_IN_INITIAL_DOWNLOAD = -10 # Still downloading initial blocks
RPC_CLIENT_NODE_ALREADY_ADDED = -23 # Node is already added
RPC_CLIENT_NODE_NOT_ADDED = -24 # Node has not been added before
RPC_CLIENT_NODE_NOT_CONNECTED = -29 # Node to disconnect not found in connected nodes
RPC_CLIENT_NOT_CONNECTED = -9 # Bitcoin is not connected
RPC_CLIENT_IN_INITIAL_DOWNLOAD = -10 # Still downloading initial blocks
RPC_CLIENT_NODE_ALREADY_ADDED = -23 # Node is already added
RPC_CLIENT_NODE_NOT_ADDED = -24 # Node has not been added before
RPC_CLIENT_NODE_NOT_CONNECTED = -29 # Node to disconnect not found in connected nodes
RPC_CLIENT_INVALID_IP_OR_SUBNET = -30 # Invalid IP/Subnet
RPC_CLIENT_P2P_DISABLED = -31 # No valid connection manager instance found
RPC_CLIENT_P2P_DISABLED = -31 # No valid connection manager instance found
# Wallet errors
RPC_WALLET_ERROR = -4 # Unspecified problem with wallet (key not found etc.)
RPC_WALLET_INSUFFICIENT_FUNDS = -6 # Not enough funds in wallet or account
RPC_WALLET_INVALID_LABEL_NAME = -11 # Invalid label name
RPC_WALLET_KEYPOOL_RAN_OUT = -12 # Keypool ran out call keypoolrefill first
RPC_WALLET_UNLOCK_NEEDED = -13 # Enter the wallet passphrase with walletpassphrase first
RPC_WALLET_ERROR = -4 # Unspecified problem with wallet (key not found etc.)
RPC_WALLET_INSUFFICIENT_FUNDS = -6 # Not enough funds in wallet or account
RPC_WALLET_INVALID_LABEL_NAME = -11 # Invalid label name
RPC_WALLET_KEYPOOL_RAN_OUT = -12 # Keypool ran out call keypoolrefill first
RPC_WALLET_UNLOCK_NEEDED = -13 # Enter the wallet passphrase with walletpassphrase first
RPC_WALLET_PASSPHRASE_INCORRECT = -14 # The wallet passphrase entered was incorrect
RPC_WALLET_WRONG_ENC_STATE = -15 # Command given in wrong wallet encryption state (encrypting an encrypted wallet etc.)
RPC_WALLET_ENCRYPTION_FAILED = -16 # Failed to encrypt the wallet
RPC_WALLET_ALREADY_UNLOCKED = -17 # Wallet is already unlocked
RPC_WALLET_NOT_FOUND = -18 # Invalid wallet specified
RPC_WALLET_NOT_SPECIFIED = -19 # No wallet specified (error when there are multiple wallets loaded)
RPC_WALLET_WRONG_ENC_STATE = (
-15
) # Command given in wrong wallet encryption state (encrypting an encrypted wallet etc.)
RPC_WALLET_ENCRYPTION_FAILED = -16 # Failed to encrypt the wallet
RPC_WALLET_ALREADY_UNLOCKED = -17 # Wallet is already unlocked
RPC_WALLET_NOT_FOUND = -18 # Invalid wallet specified
RPC_WALLET_NOT_SPECIFIED = -19 # No wallet specified (error when there are multiple wallets loaded)

View File

@@ -14,11 +14,11 @@ FEED_PORT = None
MAX_APPOINTMENTS = 100
EXPIRY_DELTA = 6
MIN_DISPUTE_DELTA = 20
SERVER_LOG_FILE = 'pisa.log'
PISA_SECRET_KEY = 'pisa_sk.pem'
SERVER_LOG_FILE = "pisa.log"
PISA_SECRET_KEY = "pisa_sk.pem"
# PISA-CLI
CLIENT_LOG_FILE = 'pisa.log'
CLIENT_LOG_FILE = "pisa.log"
# TEST
TEST_LOG_FILE = "test.log"

View File

@@ -9,13 +9,14 @@ from pisa.utils.auth_proxy import AuthServiceProxy, JSONRPCException
# NOTCOVERED
def bitcoin_cli():
return AuthServiceProxy("http://%s:%s@%s:%d" % (conf.BTC_RPC_USER, conf.BTC_RPC_PASSWD, conf.BTC_RPC_HOST,
conf.BTC_RPC_PORT))
return AuthServiceProxy(
"http://%s:%s@%s:%d" % (conf.BTC_RPC_USER, conf.BTC_RPC_PASSWD, conf.BTC_RPC_HOST, conf.BTC_RPC_PORT)
)
# TODO: currently only used in the Responder; might move there or in the BlockProcessor
# NOTCOVERED
def check_tx_in_chain(tx_id, logger=Logger(), tx_label='Transaction'):
def check_tx_in_chain(tx_id, logger=Logger(), tx_label="Transaction"):
tx_in_chain = False
confirmations = 0
@@ -31,12 +32,12 @@ def check_tx_in_chain(tx_id, logger=Logger(), tx_label='Transaction'):
logger.error("{} found in mempool".format(tx_label), txid=tx_id)
except JSONRPCException as e:
if e.error.get('code') == RPC_INVALID_ADDRESS_OR_KEY:
if e.error.get("code") == RPC_INVALID_ADDRESS_OR_KEY:
logger.error("{} not found in mempool nor blockchain".format(tx_label), txid=tx_id)
else:
# ToDO: Unhandled errors, check this properly
logger.error("JSONRPCException.", method='tools.check_tx_in_chain', error=e.error)
logger.error("JSONRPCException.", method="tools.check_tx_in_chain", error=e.error)
return tx_in_chain, confirmations
@@ -60,11 +61,11 @@ def in_correct_network(network):
genesis_block_hash = bitcoin_cli().getblockhash(0)
if network == 'mainnet' and genesis_block_hash == mainnet_genesis_block_hash:
if network == "mainnet" and genesis_block_hash == mainnet_genesis_block_hash:
correct_network = True
elif network == 'testnet' and genesis_block_hash == testnet3_genesis_block_hash:
elif network == "testnet" and genesis_block_hash == testnet3_genesis_block_hash:
correct_network = True
elif network == 'regtest' and genesis_block_hash not in [mainnet_genesis_block_hash, testnet3_genesis_block_hash]:
elif network == "regtest" and genesis_block_hash not in [mainnet_genesis_block_hash, testnet3_genesis_block_hash]:
correct_network = True
return correct_network
@@ -72,5 +73,4 @@ def in_correct_network(network):
def check_txid_format(txid):
# TODO: #12-check-txid-regexp
return isinstance(txid, str) and re.search(r'^[0-9A-Fa-f]{64}$', txid) is not None
return isinstance(txid, str) and re.search(r"^[0-9A-Fa-f]{64}$", txid) is not None

View File

@@ -57,9 +57,9 @@ log = logging.getLogger("BitcoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error, http_status=None):
try:
errmsg = '%(message)s (%(code)i)' % rpc_error
errmsg = "%(message)s (%(code)i)" % rpc_error
except (KeyError, TypeError):
errmsg = ''
errmsg = ""
super().__init__(errmsg)
self.error = rpc_error
self.http_status = http_status
@@ -71,7 +71,7 @@ def EncodeDecimal(o):
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy():
class AuthServiceProxy:
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
@@ -80,15 +80,15 @@ class AuthServiceProxy():
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urllib.parse.urlparse(service_url)
user = None if self.__url.username is None else self.__url.username.encode('utf8')
passwd = None if self.__url.password is None else self.__url.password.encode('utf8')
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
user = None if self.__url.username is None else self.__url.username.encode("utf8")
passwd = None if self.__url.password is None else self.__url.password.encode("utf8")
authpair = user + b":" + passwd
self.__auth_header = b"Basic " + base64.b64encode(authpair)
self.timeout = timeout
self._set_conn(connection)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
if name.startswith("__") and name.endswith("__"):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
@@ -96,15 +96,17 @@ class AuthServiceProxy():
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
"""
Do an HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
if os.name == 'nt':
"""
headers = {
"Host": self.__url.hostname,
"User-Agent": USER_AGENT,
"Authorization": self.__auth_header,
"Content-type": "application/json",
}
if os.name == "nt":
# Windows somehow does not like to re-use connections
# TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows
self._set_conn()
@@ -128,39 +130,40 @@ class AuthServiceProxy():
def get_request(self, *args, **argsn):
AuthServiceProxy.__id_count += 1
log.debug("-{}-> {} {}".format(
AuthServiceProxy.__id_count,
self._service_name,
json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
))
log.debug(
"-{}-> {} {}".format(
AuthServiceProxy.__id_count,
self._service_name,
json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
)
)
if args and argsn:
raise ValueError('Cannot handle both named and positional arguments')
return {'version': '1.1',
'method': self._service_name,
'params': args or argsn,
'id': AuthServiceProxy.__id_count}
raise ValueError("Cannot handle both named and positional arguments")
return {
"version": "1.1",
"method": self._service_name,
"params": args or argsn,
"id": AuthServiceProxy.__id_count,
}
def __call__(self, *args, **argsn):
postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'], status)
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'}, status)
response, status = self._request("POST", self.__url.path, postdata.encode("utf-8"))
if response["error"] is not None:
raise JSONRPCException(response["error"], status)
elif "result" not in response:
raise JSONRPCException({"code": -343, "message": "missing JSON-RPC result"}, status)
elif status != HTTPStatus.OK:
raise JSONRPCException({
'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
raise JSONRPCException({"code": -342, "message": "non-200 HTTP status code but no JSON-RPC error"}, status)
else:
return response['result']
return response["result"]
def batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> " + postdata)
response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
response, status = self._request("POST", self.__url.path, postdata.encode("utf-8"))
if status != HTTPStatus.OK:
raise JSONRPCException({
'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
raise JSONRPCException({"code": -342, "message": "non-200 HTTP status code but no JSON-RPC error"}, status)
return response
def _get_response(self):
@@ -168,44 +171,55 @@ class AuthServiceProxy():
try:
http_response = self.__conn.getresponse()
except socket.timeout:
raise JSONRPCException({
'code': -344,
'message': '%r RPC took longer than %f seconds. Consider '
'using larger timeout for calls that take '
'longer to return.' % (self._service_name,
self.__conn.timeout)})
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException(
{'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (
http_response.status, http_response.reason)},
http_response.status)
{
"code": -344,
"message": "%r RPC took longer than %f seconds. Consider "
"using larger timeout for calls that take "
"longer to return." % (self._service_name, self.__conn.timeout),
}
)
if http_response is None:
raise JSONRPCException({"code": -342, "message": "missing HTTP response from server"})
responsedata = http_response.read().decode('utf8')
content_type = http_response.getheader("Content-Type")
if content_type != "application/json":
raise JSONRPCException(
{
"code": -342,
"message": "non-JSON HTTP response with '%i %s' from server"
% (http_response.status, http_response.reason),
},
http_response.status,
)
responsedata = http_response.read().decode("utf8")
response = json.loads(responsedata, parse_float=decimal.Decimal)
elapsed = time.time() - req_start_time
if "error" in response and response["error"] is None:
log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed,
json.dumps(response["result"], default=EncodeDecimal,
ensure_ascii=self.ensure_ascii)))
log.debug(
"<-%s- [%.6f] %s"
% (
response["id"],
elapsed,
json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
)
)
else:
log.debug("<-- [%.6f] %s" % (elapsed, responsedata))
return response, http_response.status
def __truediv__(self, relative_uri):
return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name,
connection=self.__conn)
return AuthServiceProxy(
"{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn
)
def _set_conn(self, connection=None):
port = 80 if self.__url.port is None else self.__url.port
if connection:
self.__conn = connection
self.timeout = connection.timeout
elif self.__url.scheme == 'https':
elif self.__url.scheme == "https":
self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=self.timeout)
else:
self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=self.timeout)

View File

@@ -7,6 +7,7 @@ from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT
# ToDo: #7-add-async-back-to-zmq
class ZMQHandler:
""" Adapted from https://github.com/bitcoin/bitcoin/blob/master/contrib/zmq/zmq_sub.py"""
def __init__(self, parent):
self.zmqContext = zmq.Context()
self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
@@ -27,7 +28,7 @@ class ZMQHandler:
body = msg[1]
if topic == b"hashblock":
block_hash = binascii.hexlify(body).decode('UTF-8')
block_hash = binascii.hexlify(body).decode("UTF-8")
block_queue.put(block_hash)
self.logger.info("New block received via ZMQ", block_hash=block_hash)

View File

@@ -101,15 +101,19 @@ class Watcher:
block = BlockProcessor.get_block(block_hash)
if block is not None:
txids = block.get('tx')
txids = block.get("tx")
logger.info("List of transactions.", txids=txids)
expired_appointments = [uuid for uuid, appointment in self.appointments.items()
if block["height"] > appointment.end_time + EXPIRY_DELTA]
expired_appointments = [
uuid
for uuid, appointment in self.appointments.items()
if block["height"] > appointment.end_time + EXPIRY_DELTA
]
Cleaner.delete_expired_appointment(expired_appointments, self.appointments, self.locator_uuid_map,
self.db_manager)
Cleaner.delete_expired_appointment(
expired_appointments, self.appointments, self.locator_uuid_map, self.db_manager
)
potential_matches = BlockProcessor.get_potential_matches(txids, self.locator_uuid_map)
matches = BlockProcessor.get_matches(potential_matches, self.locator_uuid_map, self.appointments)
@@ -117,11 +121,21 @@ class Watcher:
for locator, uuid, dispute_txid, justice_txid, justice_rawtx in matches:
# Errors decrypting the Blob will result in a None justice_txid
if justice_txid is not None:
logger.info("Notifying responder and deleting appointment.", justice_txid=justice_txid,
locator=locator, uuid=uuid)
logger.info(
"Notifying responder and deleting appointment.",
justice_txid=justice_txid,
locator=locator,
uuid=uuid,
)
self.responder.add_response(uuid, dispute_txid, justice_txid, justice_rawtx,
self.appointments[uuid].end_time, block_hash)
self.responder.add_response(
uuid,
dispute_txid,
justice_txid,
justice_rawtx,
self.appointments[uuid].end_time,
block_hash,
)
# Delete the appointment
appointment = self.appointments.pop(uuid)
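As a point of reference for the matching step above, get_potential_matches can be thought of as a set intersection. This sketch assumes locators are the SHA256 of dispute txids, which is consistent with the locator_uuid_map fixture in the tests further down:

from binascii import unhexlify
from hashlib import sha256

# Sketch only: hash every txid in the block and intersect with the
# known locators; each hit flags the appointments mapped to it.
def get_potential_matches_sketch(txids, locator_uuid_map):
    hashed_txids = {sha256(unhexlify(txid)).hexdigest() for txid in txids}
    return hashed_txids & set(locator_uuid_map.keys())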


@@ -1,6 +1,4 @@
import pytest
import responses
import requests
import json
from binascii import hexlify
@@ -9,7 +7,6 @@ from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
import apps.cli.pisa_cli as pisa_cli
from apps.cli import PISA_PUBLIC_KEY
from test.unit.conftest import get_random_value_hex
# TODO: should find a way of doing without this


@@ -13,8 +13,8 @@ from test.simulator.zmq_publisher import ZMQPublisher
from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT
app = Flask(__name__)
HOST = 'localhost'
PORT = '18443'
HOST = "localhost"
PORT = "18443"
blockchain = []
blocks = {}
@@ -24,20 +24,20 @@ mempool = []
mine_new_block = Event()
TIME_BETWEEN_BLOCKS = 5
GENESIS_PARENT = '0000000000000000000000000000000000000000000000000000000000000000'
GENESIS_PARENT = "0000000000000000000000000000000000000000000000000000000000000000"
prev_block_hash = GENESIS_PARENT
@app.route('/generate', methods=['POST'])
@app.route("/generate", methods=["POST"])
def generate():
global mine_new_block
mine_new_block.set()
return Response(status=200, mimetype='application/json')
return Response(status=200, mimetype="application/json")
@app.route('/fork', methods=['POST'])
@app.route("/fork", methods=["POST"])
def create_fork():
"""
create_fork processes chain fork requests. It will create a fork with the following parameters:
@@ -67,10 +67,10 @@ def create_fork():
# block heights and blockchain length are currently incorrect. It does the trick to test forks, but should
# be fixed for better testing.
return Response(json.dumps(response), status=200, mimetype='application/json')
return Response(json.dumps(response), status=200, mimetype="application/json")
@app.route('/', methods=['POST'])
@app.route("/", methods=["POST"])
def process_request():
"""
process_requests simulates the bitcoin-rpc server run by bitcoind. The available commands are limited to the ones
@@ -108,7 +108,7 @@ def process_request():
global mempool
request_data = request.get_json()
method = request_data.get('method')
method = request_data.get("method")
response = {"id": 0, "result": 0, "error": None}
no_param_err = {"code": RPC_MISC_ERROR, "message": "JSON value is not a {} as expected"}
@@ -142,8 +142,10 @@ def process_request():
response["result"] = {"txid": txid}
else:
response["error"] = {"code": RPC_VERIFY_ALREADY_IN_CHAIN,
"message": "Transaction already in block chain"}
response["error"] = {
"code": RPC_VERIFY_ALREADY_IN_CHAIN,
"message": "Transaction already in block chain",
}
else:
response["error"] = {"code": RPC_DESERIALIZATION_ERROR, "message": "TX decode failed"}
@@ -158,16 +160,18 @@ def process_request():
if isinstance(txid, str):
if txid in mined_transactions:
block = blocks.get(mined_transactions[txid]["block"])
rawtx = mined_transactions[txid].get('tx')
response["result"] = {"hex": rawtx, "confirmations": len(blockchain) - block.get('height')}
rawtx = mined_transactions[txid].get("tx")
response["result"] = {"hex": rawtx, "confirmations": len(blockchain) - block.get("height")}
elif txid in mempool:
response["result"] = {"confirmations": 0}
else:
response["error"] = {'code': RPC_INVALID_ADDRESS_OR_KEY,
'message': 'No such mempool or blockchain transaction. Use gettransaction for '
'wallet transactions.'}
response["error"] = {
"code": RPC_INVALID_ADDRESS_OR_KEY,
"message": "No such mempool or blockchain transaction. Use gettransaction for "
"wallet transactions.",
}
else:
response["error"] = no_param_err
response["error"]["message"] = response["error"]["message"].format("string")
@@ -219,7 +223,7 @@ def process_request():
else:
return abort(404, "Method not found")
return Response(json.dumps(response), status=200, mimetype='application/json')
return Response(json.dumps(response), status=200, mimetype="application/json")
def get_param(request_data):
@@ -240,8 +244,9 @@ def load_data():
def simulate_mining(mode, time_between_blocks):
global mempool, mined_transactions, blocks, blockchain, mine_new_block, prev_block_hash
mining_simulator = ZMQPublisher(topic=b'hashblock', feed_protocol=FEED_PROTOCOL, feed_addr=FEED_ADDR,
feed_port=FEED_PORT)
mining_simulator = ZMQPublisher(
topic=b"hashblock", feed_protocol=FEED_PROTOCOL, feed_addr=FEED_ADDR, feed_port=FEED_PORT
)
# Set the mining event to initialize the blockchain with a block
mine_new_block.set()
@@ -266,8 +271,12 @@ def simulate_mining(mode, time_between_blocks):
mined_transactions[txid] = {"tx": tx, "block": block_hash}
# FIXME: chain_work is being defined as an incremental counter for now. Multiple chains should be possible.
blocks[block_hash] = {"tx": list(txs_to_mine.keys()), "height": len(blockchain), "previousblockhash": prev_block_hash,
"chainwork": '{:x}'.format(len(blockchain))}
blocks[block_hash] = {
"tx": list(txs_to_mine.keys()),
"height": len(blockchain),
"previousblockhash": prev_block_hash,
"chainwork": "{:x}".format(len(blockchain)),
}
mining_simulator.publish_data(binascii.unhexlify(block_hash))
blockchain.append(block_hash)
@@ -276,22 +285,22 @@ def simulate_mining(mode, time_between_blocks):
print("New block mined: {}".format(block_hash))
print("\tTransactions: {}".format(list(txs_to_mine.keys())))
if mode == 'time':
if mode == "time":
time.sleep(time_between_blocks)
else:
mine_new_block.clear()
def run_simulator(mode='time', time_between_blocks=TIME_BETWEEN_BLOCKS):
if mode not in ["time", 'event']:
def run_simulator(mode="time", time_between_blocks=TIME_BETWEEN_BLOCKS):
if mode not in ["time", "event"]:
raise ValueError("Node must be time or event")
mining_thread = Thread(target=simulate_mining, args=[mode, time_between_blocks])
mining_thread.start()
# Setting Flask log to ERROR only so it does not mess with our logging. Also disabling Flask's initial messages
logging.getLogger('werkzeug').setLevel(logging.ERROR)
os.environ['WERKZEUG_RUN_MAIN'] = 'true'
logging.getLogger("werkzeug").setLevel(logging.ERROR)
os.environ["WERKZEUG_RUN_MAIN"] = "true"
app.run(host=HOST, port=PORT)
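Putting the simulator together, a minimal driving sketch (mode, host and port come from the code above and the tests below; the flow itself is illustrative):

from threading import Thread
import requests

# Run the simulator in event mode, then mine a block on demand by
# hitting the /generate endpoint defined above.
Thread(target=run_simulator, kwargs={"mode": "event"}, daemon=True).start()
requests.post("http://localhost:18443/generate")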


@@ -9,12 +9,12 @@ from test.simulator.bitcoind_sim import run_simulator
from pisa.utils.auth_proxy import AuthServiceProxy, JSONRPCException
from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT
MIXED_VALUES = values = [-1, 500, '', '111', [], 1.1, None, '', "a" * 31, "b" * 33, get_random_value_hex(32)]
MIXED_VALUES = values = [-1, 500, "", "111", [], 1.1, None, "", "a" * 31, "b" * 33, get_random_value_hex(32)]
bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT))
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def run_bitcoind():
bitcoind_thread = Thread(target=run_simulator, kwargs={"mode": "event"})
bitcoind_thread.daemon = True
@@ -31,19 +31,20 @@ def genesis_block_hash(run_bitcoind):
def check_hash_format(txid):
# TODO: #12-check-txid-regexp
return isinstance(txid, str) and re.search(r'^[0-9A-Fa-f]{64}$', txid) is not None
return isinstance(txid, str) and re.search(r"^[0-9A-Fa-f]{64}$", txid) is not None
def test_help(run_bitcoind):
# Help should always return 0
assert(bitcoin_cli.help() == 0)
assert bitcoin_cli.help() == 0
# FIXME: Better assert for the exceptions would be nice (check the returned errno is the expected one)
def test_getblockhash(genesis_block_hash):
# First block
assert(check_hash_format(genesis_block_hash))
assert check_hash_format(genesis_block_hash)
# Check that the values are within range and of the proper format (all should fail)
for v in MIXED_VALUES:
@@ -57,9 +58,9 @@ def test_getblockhash(genesis_block_hash):
def test_get_block(genesis_block_hash):
# getblock should return a list of transactions and the height
block = bitcoin_cli.getblock(genesis_block_hash)
assert(isinstance(block.get('tx'), list))
assert(len(block.get('tx')) != 0)
assert(isinstance(block.get('height'), int))
assert isinstance(block.get("tx"), list)
assert len(block.get("tx")) != 0
assert isinstance(block.get("height"), int)
# It should fail for wrong data formats and random ids
for v in MIXED_VALUES:
@@ -73,21 +74,21 @@ def test_get_block(genesis_block_hash):
def test_decoderawtransaction(genesis_block_hash):
# decoderawtransaction should only return if the given transaction matches a txid format
block = bitcoin_cli.getblock(genesis_block_hash)
coinbase_txid = block.get('tx')[0]
coinbase_txid = block.get("tx")[0]
coinbase_tx = bitcoin_cli.getrawtransaction(coinbase_txid).get("hex")
tx = bitcoin_cli.decoderawtransaction(coinbase_tx)
assert(isinstance(tx, dict))
assert(isinstance(tx.get('txid'), str))
assert(check_hash_format(tx.get('txid')))
assert isinstance(tx, dict)
assert isinstance(tx.get("txid"), str)
assert check_hash_format(tx.get("txid"))
# Therefore should also work for a random transaction hex in our simulation
random_tx = TX.create_dummy_transaction()
tx = bitcoin_cli.decoderawtransaction(random_tx)
assert(isinstance(tx, dict))
assert(isinstance(tx.get('txid'), str))
assert(check_hash_format(tx.get('txid')))
assert isinstance(tx, dict)
assert isinstance(tx.get("txid"), str)
assert check_hash_format(tx.get("txid"))
# But it should fail for not proper formatted one
for v in MIXED_VALUES:
@@ -124,8 +125,8 @@ def test_getrawtransaction(genesis_block_hash):
genesis_tx = bitcoin_cli.getblock(genesis_block_hash).get("tx")[0]
tx = bitcoin_cli.getrawtransaction(genesis_tx)
assert(isinstance(tx, dict))
assert(isinstance(tx.get('confirmations'), int))
assert isinstance(tx, dict)
assert isinstance(tx.get("confirmations"), int)
for v in MIXED_VALUES:
try:
@@ -138,9 +139,5 @@ def test_getrawtransaction(genesis_block_hash):
def test_getblockcount():
# getblockcount should always return a positive integer
bc = bitcoin_cli.getblockcount()
assert (isinstance(bc, int))
assert (bc >= 0)
assert isinstance(bc, int)
assert bc >= 0


@@ -137,14 +137,16 @@ class TX:
tx.prev_out_index = [prev_out_index]
tx.nLockTime = 0
tx.scriptSig = [
'47304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860'
'a4acdd12909d831cc56cbbac4622082221a8768d1d0901']
"47304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860"
"a4acdd12909d831cc56cbbac4622082221a8768d1d0901"
]
tx.scriptSig_len = [77]
tx.nSequence = [4294967295]
tx.value = [5000000000]
tx.scriptPubKey = [
'4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c'
'1b7303b8a0626f1baded5c72a704f7e6cd84cac']
"4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c"
"1b7303b8a0626f1baded5c72a704f7e6cd84cac"
]
tx.scriptPubKey_len = [67]
return tx.serialize()


@@ -17,7 +17,7 @@ def change_endianness(x):
y = unhexlify(x)
z = y[::-1]
return hexlify(z).decode('utf-8')
return hexlify(z).decode("utf-8")
def parse_varint(tx):
@@ -30,10 +30,10 @@ def parse_varint(tx):
# First of all, the offset of the hex transaction is moved to the proper position (i.e. where the varint should be
# located) and the length and format of the data to be analyzed are checked.
data = tx.hex[tx.offset:]
assert (len(data) > 0)
data = tx.hex[tx.offset :]
assert len(data) > 0
size = int(data[:2], 16)
assert (size <= 255)
assert size <= 255
# Then, the integer is encoded as a varint using the proper prefix, if needed.
if size <= 252: # No prefix
@@ -49,7 +49,7 @@ def parse_varint(tx):
# Finally, the storage length is used to extract the proper number of bytes from the transaction hex and the
# transaction offset is updated.
varint = data[:storage_length * 2]
varint = data[: storage_length * 2]
tx.offset += storage_length * 2
return varint
@@ -65,7 +65,7 @@ def parse_element(tx, size):
:rtype: hex str
"""
element = tx.hex[tx.offset:tx.offset + size * 2]
element = tx.hex[tx.offset : tx.offset + size * 2]
tx.offset += size * 2
return element
@@ -97,7 +97,7 @@ def encode_varint(value):
prefix = 255 # 0xFF
else:
raise Exception("Wrong input data size")
varint = format(prefix, 'x') + change_endianness(int2bytes(value, size))
varint = format(prefix, "x") + change_endianness(int2bytes(value, size))
return varint
@@ -112,12 +112,13 @@ def int2bytes(a, b):
:rtype: hex str
"""
m = pow(2, 8*b) - 1
m = pow(2, 8 * b) - 1
if a > m:
raise Exception(str(a) + " is too big to be represented with " + str(b) + " bytes. Maximum value is "
+ str(m) + ".")
raise Exception(
str(a) + " is too big to be represented with " + str(b) + " bytes. Maximum value is " + str(m) + "."
)
return ('%0' + str(2 * b) + 'x') % a
return ("%0" + str(2 * b) + "x") % a
def sha256d(hex_data):
@@ -125,4 +126,3 @@ def sha256d(hex_data):
double_sha256 = sha256(sha256(data).digest()).hexdigest()
return change_endianness(double_sha256)


@@ -20,7 +20,7 @@ from test.simulator.transaction import TX
from test.simulator.bitcoind_sim import run_simulator, HOST, PORT
@pytest.fixture(scope='session')
@pytest.fixture(scope="session")
def run_bitcoind():
bitcoind_thread = Thread(target=run_simulator, kwargs={"mode": "event"})
bitcoind_thread.daemon = True
@@ -30,7 +30,7 @@ def run_bitcoind():
sleep(0.1)
@pytest.fixture(scope='session')
@pytest.fixture(scope="session")
def run_api():
db_manager = DBManager(DB_PATH)
watcher = Watcher(db_manager)
@@ -43,23 +43,23 @@ def run_api():
sleep(0.1)
@pytest.fixture(scope='session', autouse=True)
@pytest.fixture(scope="session", autouse=True)
def prng_seed():
random.seed(0)
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def db_manager():
manager = DBManager('test_db')
manager = DBManager("test_db")
yield manager
manager.db.close()
rmtree('test_db')
rmtree("test_db")
def get_random_value_hex(nbytes):
pseudo_random_value = random.getrandbits(8 * nbytes)
prv_hex = '{:x}'.format(pseudo_random_value)
prv_hex = "{:x}".format(pseudo_random_value)
return prv_hex.zfill(2 * nbytes)
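Two properties of this helper worth noting: zfill pads the hex string to exactly 2 * nbytes characters, and under the session-wide prng_seed fixture above its output is reproducible across runs.

import random

# Reproducible under the seeded PRNG; always 64 chars for 32 bytes.
random.seed(0)
assert len(get_random_value_hex(32)) == 64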
@@ -80,8 +80,13 @@ def generate_dummy_appointment_data(start_time_offset=5, end_time_offset=30):
dispute_txid = sha256d(dispute_tx)
justice_tx = TX.create_dummy_transaction(dispute_txid)
dummy_appointment_data = {"tx": justice_tx, "tx_id": dispute_txid, "start_time": current_height + start_time_offset,
"end_time": current_height + end_time_offset, "dispute_delta": 20}
dummy_appointment_data = {
"tx": justice_tx,
"tx_id": dispute_txid,
"start_time": current_height + start_time_offset,
"end_time": current_height + end_time_offset,
"dispute_delta": 20,
}
cipher = "AES-GCM-128"
hash_function = "SHA256"
@@ -91,18 +96,24 @@ def generate_dummy_appointment_data(start_time_offset=5, end_time_offset=30):
encrypted_blob = blob.encrypt((dummy_appointment_data.get("tx_id")))
appointment_data = {"locator": locator, "start_time": dummy_appointment_data.get("start_time"),
"end_time": dummy_appointment_data.get("end_time"),
"dispute_delta": dummy_appointment_data.get("dispute_delta"),
"encrypted_blob": encrypted_blob, "cipher": cipher, "hash_function": hash_function,
"triggered": False}
appointment_data = {
"locator": locator,
"start_time": dummy_appointment_data.get("start_time"),
"end_time": dummy_appointment_data.get("end_time"),
"dispute_delta": dummy_appointment_data.get("dispute_delta"),
"encrypted_blob": encrypted_blob,
"cipher": cipher,
"hash_function": hash_function,
"triggered": False,
}
return appointment_data, dispute_tx
def generate_dummy_appointment(start_time_offset=5, end_time_offset=30):
appointment_data, dispute_tx = generate_dummy_appointment_data(start_time_offset=start_time_offset,
end_time_offset=end_time_offset)
appointment_data, dispute_tx = generate_dummy_appointment_data(
start_time_offset=start_time_offset, end_time_offset=end_time_offset
)
return Appointment.from_dict(appointment_data), dispute_tx
@@ -112,7 +123,8 @@ def generate_dummy_job():
justice_txid = get_random_value_hex(32)
justice_rawtx = get_random_value_hex(100)
job_data = dict(dispute_txid=dispute_txid, justice_txid=justice_txid, justice_rawtx=justice_rawtx,
appointment_end=100)
job_data = dict(
dispute_txid=dispute_txid, justice_txid=justice_txid, justice_rawtx=justice_rawtx, appointment_end=100
)
return Job.from_dict(job_data)


@@ -36,22 +36,22 @@ def add_appointment(appointment):
def test_add_appointment(run_api, run_bitcoind, new_appointment):
# Properly formatted appointment
r = add_appointment(new_appointment)
assert (r.status_code == 200)
assert r.status_code == 200
# Incorrect appointment
new_appointment["dispute_delta"] = 0
r = add_appointment(new_appointment)
assert (r.status_code == 400)
assert r.status_code == 400
def test_request_appointment(new_appointment):
# First we need to add an appointment
r = add_appointment(new_appointment)
assert (r.status_code == 200)
assert r.status_code == 200
# Next we can request it
r = requests.get(url=PISA_API + "/get_appointment?locator=" + new_appointment["locator"])
assert (r.status_code == 200)
assert r.status_code == 200
# Each locator may point to multiple appointments, check them all
received_appointments = json.loads(r.content)
@@ -60,20 +60,20 @@ def test_request_appointment(new_appointment):
appointment_status = [appointment.pop("status") for appointment in received_appointments]
# Check that the appointment is among the received appointments
assert (new_appointment in received_appointments)
assert new_appointment in received_appointments
# Check that all the appointments are being watched
assert (all([status == "being_watched" for status in appointment_status]))
assert all([status == "being_watched" for status in appointment_status])
def test_request_random_appointment():
r = requests.get(url=PISA_API + "/get_appointment?locator=" + get_random_value_hex(32))
assert (r.status_code == 200)
assert r.status_code == 200
received_appointments = json.loads(r.content)
appointment_status = [appointment.pop("status") for appointment in received_appointments]
assert (all([status == "not_found" for status in appointment_status]))
assert all([status == "not_found" for status in appointment_status])
def test_add_appointment_multiple_times(new_appointment, n=MULTIPLE_APPOINTMENTS):
@@ -81,29 +81,29 @@ def test_add_appointment_multiple_times(new_appointment, n=MULTIPLE_APPOINTMENTS
# DISCUSS: #34-store-identical-appointments
for _ in range(n):
r = add_appointment(new_appointment)
assert (r.status_code == 200)
assert r.status_code == 200
def test_request_multiple_appointments_same_locator(new_appointment, n=MULTIPLE_APPOINTMENTS):
for _ in range(n):
r = add_appointment(new_appointment)
assert (r.status_code == 200)
assert r.status_code == 200
test_request_appointment(new_appointment)
def test_add_too_many_appointment(new_appointment):
for _ in range(MAX_APPOINTMENTS-len(appointments)):
for _ in range(MAX_APPOINTMENTS - len(appointments)):
r = add_appointment(new_appointment)
assert (r.status_code == 200)
assert r.status_code == 200
r = add_appointment(new_appointment)
assert (r.status_code == 503)
assert r.status_code == 503
def test_get_all_appointments_watcher():
r = requests.get(url=PISA_API + "/get_all_appointments")
assert (r.status_code == 200 and r.reason == 'OK')
assert r.status_code == 200 and r.reason == "OK"
received_appointments = json.loads(r.content)
@@ -111,8 +111,8 @@ def test_get_all_appointments_watcher():
watcher_locators = [v["locator"] for k, v in received_appointments["watcher_appointments"].items()]
local_locators = [appointment["locator"] for appointment in appointments]
assert(set(watcher_locators) == set(local_locators))
assert(len(received_appointments["responder_jobs"]) == 0)
assert set(watcher_locators) == set(local_locators)
assert len(received_appointments["responder_jobs"]) == 0
def test_get_all_appointments_responder():
@@ -138,5 +138,5 @@ def test_get_all_appointments_responder():
watcher_appointments = [v["locator"] for k, v in received_appointments["watcher_appointments"].items()]
print(set(watcher_appointments) == set(local_locators))
assert (set(responder_jobs) == set(local_locators))
assert (len(received_appointments["watcher_appointments"]) == 0)
assert set(responder_jobs) == set(local_locators)
assert len(received_appointments["watcher_appointments"]) == 0


@@ -33,9 +33,15 @@ def test_init_appointment(appointment_data):
appointment = Appointment(locator, start_time, end_time, dispute_delta, encrypted_blob_data, cipher, hash_function)
assert (locator == appointment.locator and start_time == appointment.start_time and end_time == appointment.end_time
and EncryptedBlob(encrypted_blob_data) == appointment.encrypted_blob and cipher == appointment.cipher
and dispute_delta == appointment.dispute_delta and hash_function == appointment.hash_function)
assert (
locator == appointment.locator
and start_time == appointment.start_time
and end_time == appointment.end_time
and EncryptedBlob(encrypted_blob_data) == appointment.encrypted_blob
and cipher == appointment.cipher
and dispute_delta == appointment.dispute_delta
and hash_function == appointment.hash_function
)
def test_to_dict(appointment_data):
@@ -44,10 +50,15 @@ def test_to_dict(appointment_data):
dict_appointment = appointment.to_dict()
assert (locator == dict_appointment.get("locator") and start_time == dict_appointment.get("start_time")
and end_time == dict_appointment.get("end_time") and dispute_delta == dict_appointment.get("dispute_delta")
and cipher == dict_appointment.get("cipher") and hash_function == dict_appointment.get("hash_function")
and encrypted_blob_data == dict_appointment.get("encrypted_blob"))
assert (
locator == dict_appointment.get("locator")
and start_time == dict_appointment.get("start_time")
and end_time == dict_appointment.get("end_time")
and dispute_delta == dict_appointment.get("dispute_delta")
and cipher == dict_appointment.get("cipher")
and hash_function == dict_appointment.get("hash_function")
and encrypted_blob_data == dict_appointment.get("encrypted_blob")
)
def test_to_json(appointment_data):
@@ -56,7 +67,12 @@ def test_to_json(appointment_data):
dict_appointment = json.loads(appointment.to_json())
assert (locator == dict_appointment.get("locator") and start_time == dict_appointment.get("start_time")
and end_time == dict_appointment.get("end_time") and dispute_delta == dict_appointment.get("dispute_delta")
and cipher == dict_appointment.get("cipher") and hash_function == dict_appointment.get("hash_function")
and encrypted_blob_data == dict_appointment.get("encrypted_blob"))
assert (
locator == dict_appointment.get("locator")
and start_time == dict_appointment.get("start_time")
and end_time == dict_appointment.get("end_time")
and dispute_delta == dict_appointment.get("dispute_delta")
and cipher == dict_appointment.get("cipher")
and hash_function == dict_appointment.get("hash_function")
and encrypted_blob_data == dict_appointment.get("encrypted_blob")
)


@@ -18,7 +18,7 @@ def test_init_blob():
for case in cipher_cases:
blob = Blob(data, case, hash_function)
assert(blob.data == data and blob.cipher == case and blob.hash_function == hash_function)
assert blob.data == data and blob.cipher == case and blob.hash_function == hash_function
# Fixed (valid) cipher, try different valid hash functions
cipher = SUPPORTED_CIPHERS[0]
@@ -27,7 +27,7 @@ def test_init_blob():
for case in hash_function_cases:
blob = Blob(data, cipher, case)
assert(blob.data == data and blob.cipher == cipher and blob.hash_function == case)
assert blob.data == data and blob.cipher == cipher and blob.hash_function == case
# Invalid data
data = unhexlify(get_random_value_hex(64))
@@ -87,4 +87,4 @@ def test_encrypt():
# Check that two encryptions of the same data have the same result
encrypted_blob2 = blob.encrypt(key)
assert(encrypted_blob == encrypted_blob2 and id(encrypted_blob) != id(encrypted_blob2))
assert encrypted_blob == encrypted_blob2 and id(encrypted_blob) != id(encrypted_blob2)
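For reference, the determinism asserted above can be shown end to end. A usage sketch; the cipher and hash function names are the supported values used elsewhere in these tests:

# Same Blob and same txid always produce the same ciphertext, as the
# deterministic-encryption test above implies (the key material is
# derived from the txid).
blob = Blob(get_random_value_hex(64), "AES-GCM-128", "SHA256")
txid = get_random_value_hex(32)
assert blob.encrypt(txid) == blob.encrypt(txid)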


@@ -13,12 +13,12 @@ APPOINTMENT_COUNT = 100
TEST_SET_SIZE = 200
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def txids():
return [get_random_value_hex(32) for _ in range(APPOINTMENT_COUNT)]
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def locator_uuid_map(txids):
return {sha256(unhexlify(txid)).hexdigest(): uuid4().hex for txid in txids}
@@ -40,7 +40,7 @@ def test_get_block(best_block_hash):
# Checking that the received block has at least the fields we need
# FIXME: We could be more strict here, but we'll need to add those restrictions to bitcoind_sim too
assert isinstance(block, dict)
assert block.get('hash') == best_block_hash and 'height' in block and 'previousblockhash' in block and 'tx' in block
assert block.get("hash") == best_block_hash and "height" in block and "previousblockhash" in block and "tx" in block
def test_get_random_block():


@@ -74,6 +74,3 @@ def test_build_block_queue():
blocks.remove(block)
assert len(blocks) == 0


@@ -17,7 +17,7 @@ c_logger.disabled = True
sent_txs = []
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def carrier():
return Carrier()
@@ -28,7 +28,7 @@ def test_send_transaction(run_bitcoind, carrier):
receipt = carrier.send_transaction(tx, txid)
assert(receipt.delivered is True)
assert receipt.delivered is True
def test_send_double_spending_transaction(carrier):
@@ -47,9 +47,8 @@ def test_send_double_spending_transaction(carrier):
# The carrier should report delivered True for both, but in the second case the transaction was already delivered
# (either by itself or by someone else)
assert(receipt.delivered is True)
assert (receipt2.delivered is True and receipt2.confirmations >= 1
and receipt2.reason == RPC_VERIFY_ALREADY_IN_CHAIN)
assert receipt.delivered is True
assert receipt2.delivered is True and receipt2.confirmations >= 1 and receipt2.reason == RPC_VERIFY_ALREADY_IN_CHAIN
def test_send_transaction_invalid_format(carrier):
@@ -58,7 +57,7 @@ def test_send_transaction_invalid_format(carrier):
txid = sha256d(tx)
receipt = carrier.send_transaction(txid, txid)
assert (receipt.delivered is False and receipt.reason == RPC_DESERIALIZATION_ERROR)
assert receipt.delivered is False and receipt.reason == RPC_DESERIALIZATION_ERROR
def test_get_transaction():
@@ -73,5 +72,3 @@ def test_get_non_existing_transaction():
tx_info = Carrier.get_transaction(get_random_value_hex(32))
assert tx_info is None


@@ -9,12 +9,12 @@ from test.unit.conftest import get_random_value_hex, generate_dummy_appointment
from pisa.db_manager import WATCHER_LAST_BLOCK_KEY, RESPONDER_LAST_BLOCK_KEY, LOCATOR_MAP_PREFIX
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def watcher_appointments():
return {uuid4().hex: generate_dummy_appointment()[0] for _ in range(10)}
@pytest.fixture(scope='module')
@pytest.fixture(scope="module")
def responder_jobs():
return {get_random_value_hex(32): get_random_value_hex(32) for _ in range(10)}
@@ -31,7 +31,7 @@ def open_create_db(db_path):
def test_init():
db_path = 'init_test_db'
db_path = "init_test_db"
# First we check if the db exists, and if so we delete it
if os.path.isdir(db_path):
@@ -57,7 +57,7 @@ def test_init():
def test_load_appointments_db(db_manager):
# Let's make up a prefix and try to load data from the database using it
prefix = 'XX'
prefix = "XX"
db_appointments = db_manager.load_appointments_db(prefix)
assert len(db_appointments) == 0
@@ -69,7 +69,7 @@ def test_load_appointments_db(db_manager):
value = get_random_value_hex(32)
local_appointments[key] = value
db_manager.db.put((prefix+key).encode('utf-8'), json.dumps({'value': value}).encode('utf-8'))
db_manager.db.put((prefix + key).encode("utf-8"), json.dumps({"value": value}).encode("utf-8"))
db_appointments = db_manager.load_appointments_db(prefix)
@@ -88,7 +88,7 @@ def test_get_last_known_block(db_manager):
# After saving some block in the db we should get that exact value
for key in [WATCHER_LAST_BLOCK_KEY, RESPONDER_LAST_BLOCK_KEY]:
block_hash = get_random_value_hex(32)
db_manager.db.put(key.encode('utf-8'), block_hash.encode('utf-8'))
db_manager.db.put(key.encode("utf-8"), block_hash.encode("utf-8"))
assert db_manager.get_last_known_block(key) == block_hash
@@ -100,24 +100,24 @@ def test_create_entry(db_manager):
db_manager.create_entry(key, value)
# We should be able to get it straightaway from the key
assert db_manager.db.get(key.encode('utf-8')).decode('utf-8') == value
assert db_manager.db.get(key.encode("utf-8")).decode("utf-8") == value
# If we prefix the key we should be able to get it if we add the prefix, but not otherwise
key = get_random_value_hex(32)
prefix = 'w'
prefix = "w"
db_manager.create_entry(key, value, prefix=prefix)
assert db_manager.db.get((prefix+key).encode('utf-8')).decode('utf-8') == value
assert db_manager.db.get(key.encode('utf-8')) is None
assert db_manager.db.get((prefix + key).encode("utf-8")).decode("utf-8") == value
assert db_manager.db.get(key.encode("utf-8")) is None
# Same if we try to use any other prefix
another_prefix = 'r'
assert db_manager.db.get((another_prefix+key).encode('utf-8')) is None
another_prefix = "r"
assert db_manager.db.get((another_prefix + key).encode("utf-8")) is None
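In short, the namespacing these asserts rely on is plain string concatenation: create_entry prepends the one-character prefix to the key before writing to LevelDB. A sketch consistent with the calls above (key and value here are made up):

# Stored under prefix + key (utf-8 encoded); the unprefixed key stays
# unset, and the raw value comes back as bytes.
db_manager.create_entry("deadbeef", "00ff", prefix="w")
assert db_manager.db.get(b"wdeadbeef") == b"00ff"
assert db_manager.db.get(b"deadbeef") is None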
def test_delete_entry(db_manager):
# Let's first get the keys of all the things we've written to the db so far
data = [k.decode('utf-8') for k, v in db_manager.db.iterator()]
data = [k.decode("utf-8") for k, v in db_manager.db.iterator()]
# Let's empty the db now
for key in data:
@@ -132,11 +132,11 @@ def test_delete_entry(db_manager):
db_manager.create_entry(key, value, prefix)
# Checks it's there
assert db_manager.db.get((prefix + key).encode('utf-8')).decode('utf-8') == value
assert db_manager.db.get((prefix + key).encode("utf-8")).decode("utf-8") == value
# And now it's gone
db_manager.delete_entry(key, prefix)
assert db_manager.db.get((prefix+key).encode('utf-8')) is None
assert db_manager.db.get((prefix + key).encode("utf-8")) is None
def test_load_watcher_appointments_empty(db_manager):
@@ -172,14 +172,14 @@ def test_store_update_locator_map_empty(db_manager):
def test_delete_locator_map(db_manager):
locator_maps = db_manager.load_appointments_db(prefix=LOCATOR_MAP_PREFIX)
assert(len(locator_maps) != 0)
assert len(locator_maps) != 0
for locator, uuids in locator_maps.items():
print(locator)
db_manager.delete_locator_map(locator)
locator_maps = db_manager.load_appointments_db(prefix=LOCATOR_MAP_PREFIX)
assert (len(locator_maps) == 0)
assert len(locator_maps) == 0
def test_store_load_watcher_appointment(db_manager, watcher_appointments):
@@ -200,7 +200,7 @@ def test_store_load_watcher_appointment(db_manager, watcher_appointments):
def test_store_load_appointment_jobs(db_manager, responder_jobs):
for key, value in responder_jobs.items():
db_manager.store_responder_job(key, json.dumps({'value': value}))
db_manager.store_responder_job(key, json.dumps({"value": value}))
db_responder_jobs = db_manager.load_responder_jobs()
@@ -252,6 +252,3 @@ def test_store_load_last_block_hash_responder(db_manager):
db_last_block_hash = db_manager.load_last_block_hash_responder()
assert local_last_block_hash == db_last_block_hash


@@ -8,7 +8,7 @@ c_logger.disabled = True
def test_init_encrypted_blob():
# Not much to test here, basically that the object is properly created
data = get_random_value_hex(64)
assert (EncryptedBlob(data).data == data)
assert EncryptedBlob(data).data == data
def test_decrypt():
@@ -24,8 +24,8 @@ def test_decrypt():
# Valid data should decrypt with no InvalidTag and match the original plaintext
data = "6097cdf52309b1b2124efeed36bd34f46dc1c25ad23ac86f28380f746254f777"
key = 'b2e984a570f6f49bc38ace178e09147b0aa296cbb7c92eb01412f7e2d07b5659'
key = "b2e984a570f6f49bc38ace178e09147b0aa296cbb7c92eb01412f7e2d07b5659"
encrypted_data = "092e93d4a34aac4367075506f2c050ddfa1a201ee6669b65058572904dcea642aeb01ea4b57293618e8c46809dfadadc"
encrypted_blob = EncryptedBlob(encrypted_data)
assert(encrypted_blob.decrypt(key) == data)
assert encrypted_blob.decrypt(key) == data


@@ -13,38 +13,38 @@ c_logger.disabled = True
inspector = Inspector()
APPOINTMENT_OK = (0, None)
NO_HEX_STRINGS = ["R" * 64, get_random_value_hex(31) + "PP", "$"*64, " "*64]
WRONG_TYPES = [[], '', get_random_value_hex(32), 3.2, 2.0, (), object, {}, " "*32, object()]
NO_HEX_STRINGS = ["R" * 64, get_random_value_hex(31) + "PP", "$" * 64, " " * 64]
WRONG_TYPES = [[], "", get_random_value_hex(32), 3.2, 2.0, (), object, {}, " " * 32, object()]
WRONG_TYPES_NO_STR = [[], unhexlify(get_random_value_hex(32)), 3.2, 2.0, (), object, {}, object()]
def test_check_locator():
# Right appointment type, size and format
locator = get_random_value_hex(32)
assert(Inspector.check_locator(locator) == APPOINTMENT_OK)
assert Inspector.check_locator(locator) == APPOINTMENT_OK
# Wrong size (too big)
locator = get_random_value_hex(33)
assert(Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE)
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE
# Wrong size (too small)
locator = get_random_value_hex(31)
assert(Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE)
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_SIZE
# Empty
locator = None
assert (Inspector.check_locator(locator)[0] == APPOINTMENT_EMPTY_FIELD)
assert Inspector.check_locator(locator)[0] == APPOINTMENT_EMPTY_FIELD
# Wrong type (several types tested, it should do for anything that is not a string)
locators = [[], -1, 3.2, 0, 4, (), object, {}, object()]
for locator in locators:
assert (Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_TYPE
# Wrong format (no hex)
locators = NO_HEX_STRINGS
for locator in locators:
assert (Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_FORMAT)
assert Inspector.check_locator(locator)[0] == APPOINTMENT_WRONG_FIELD_FORMAT
def test_check_start_time():
@@ -53,21 +53,21 @@ def test_check_start_time():
# Right format and right value (start time in the future)
start_time = 101
assert (Inspector.check_start_time(start_time, current_time) == APPOINTMENT_OK)
assert Inspector.check_start_time(start_time, current_time) == APPOINTMENT_OK
# Start time too small (either same block or block in the past)
start_times = [100, 99, 98, -1]
for start_time in start_times:
assert (Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL)
assert Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL
# Empty field
start_time = None
assert (Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD)
assert Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD
# Wrong data type
start_times = WRONG_TYPES
for start_time in start_times:
assert (Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
assert Inspector.check_start_time(start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE
def test_check_end_time():
@@ -77,54 +77,54 @@ def test_check_end_time():
# Right format and right value (start time before end and end in the future)
end_time = 121
assert (Inspector.check_end_time(end_time, start_time, current_time) == APPOINTMENT_OK)
assert Inspector.check_end_time(end_time, start_time, current_time) == APPOINTMENT_OK
# End time too small (start time after end time)
end_times = [120, 119, 118, -1]
for end_time in end_times:
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL)
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL
# End time too small (either same height as current block or in the past)
current_time = 130
end_times = [130, 129, 128, -1]
for end_time in end_times:
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL)
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_FIELD_TOO_SMALL
# Empty field
end_time = None
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD)
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_EMPTY_FIELD
# Wrong data type
end_times = WRONG_TYPES
for end_time in end_times:
assert (Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
assert Inspector.check_end_time(end_time, start_time, current_time)[0] == APPOINTMENT_WRONG_FIELD_TYPE
def test_check_delta():
# Right value, right format
deltas = [MIN_DISPUTE_DELTA, MIN_DISPUTE_DELTA+1, MIN_DISPUTE_DELTA+1000]
deltas = [MIN_DISPUTE_DELTA, MIN_DISPUTE_DELTA + 1, MIN_DISPUTE_DELTA + 1000]
for delta in deltas:
assert (Inspector.check_delta(delta) == APPOINTMENT_OK)
assert Inspector.check_delta(delta) == APPOINTMENT_OK
# Delta too small
deltas = [MIN_DISPUTE_DELTA-1, MIN_DISPUTE_DELTA-2, 0, -1, -1000]
deltas = [MIN_DISPUTE_DELTA - 1, MIN_DISPUTE_DELTA - 2, 0, -1, -1000]
for delta in deltas:
assert (Inspector.check_delta(delta)[0] == APPOINTMENT_FIELD_TOO_SMALL)
assert Inspector.check_delta(delta)[0] == APPOINTMENT_FIELD_TOO_SMALL
# Empty field
delta = None
assert(Inspector.check_delta(delta)[0] == APPOINTMENT_EMPTY_FIELD)
assert Inspector.check_delta(delta)[0] == APPOINTMENT_EMPTY_FIELD
# Wrong data type
deltas = WRONG_TYPES
for delta in deltas:
assert (Inspector.check_delta(delta)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
assert Inspector.check_delta(delta)[0] == APPOINTMENT_WRONG_FIELD_TYPE
def test_check_blob():
# Right format and length
encrypted_blob = get_random_value_hex(120)
assert(Inspector.check_blob(encrypted_blob) == APPOINTMENT_OK)
assert Inspector.check_blob(encrypted_blob) == APPOINTMENT_OK
# # Wrong content
# # FIXME: There is no properly defined format for this yet. It should be restricted by size at least, and check it
@@ -133,16 +133,16 @@ def test_check_blob():
# Wrong type
encrypted_blobs = WRONG_TYPES_NO_STR
for encrypted_blob in encrypted_blobs:
assert (Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
assert Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_TYPE
# Empty field
encrypted_blob = None
assert (Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_EMPTY_FIELD)
assert Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_EMPTY_FIELD
# Wrong format (no hex)
encrypted_blobs = NO_HEX_STRINGS
for encrypted_blob in encrypted_blobs:
assert (Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_FORMAT)
assert Inspector.check_blob(encrypted_blob)[0] == APPOINTMENT_WRONG_FIELD_FORMAT
def test_check_cipher():
@@ -150,21 +150,21 @@ def test_check_cipher():
for cipher in SUPPORTED_CIPHERS:
cipher_cases = [cipher, cipher.lower(), cipher.capitalize()]
for case in cipher_cases:
assert(Inspector.check_cipher(case) == APPOINTMENT_OK)
assert Inspector.check_cipher(case) == APPOINTMENT_OK
# Wrong type
ciphers = WRONG_TYPES_NO_STR
for cipher in ciphers:
assert(Inspector.check_cipher(cipher)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
assert Inspector.check_cipher(cipher)[0] == APPOINTMENT_WRONG_FIELD_TYPE
# Wrong value
ciphers = NO_HEX_STRINGS
for cipher in ciphers:
assert(Inspector.check_cipher(cipher)[0] == APPOINTMENT_CIPHER_NOT_SUPPORTED)
assert Inspector.check_cipher(cipher)[0] == APPOINTMENT_CIPHER_NOT_SUPPORTED
# Empty field
cipher = None
assert (Inspector.check_cipher(cipher)[0] == APPOINTMENT_EMPTY_FIELD)
assert Inspector.check_cipher(cipher)[0] == APPOINTMENT_EMPTY_FIELD
def test_check_hash_function():
@@ -172,21 +172,21 @@ def test_check_hash_function():
for hash_function in SUPPORTED_HASH_FUNCTIONS:
hash_function_cases = [hash_function, hash_function.lower(), hash_function.capitalize()]
for case in hash_function_cases:
assert (Inspector.check_hash_function(case) == APPOINTMENT_OK)
assert Inspector.check_hash_function(case) == APPOINTMENT_OK
# Wrong type
hash_functions = WRONG_TYPES_NO_STR
for hash_function in hash_functions:
assert (Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_WRONG_FIELD_TYPE)
assert Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_WRONG_FIELD_TYPE
# Wrong value
hash_functions = NO_HEX_STRINGS
for hash_function in hash_functions:
assert (Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED)
assert Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED
# Empty field
hash_function = None
assert (Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_EMPTY_FIELD)
assert Inspector.check_hash_function(hash_function)[0] == APPOINTMENT_EMPTY_FIELD
def test_inspect(run_bitcoind):
@@ -196,7 +196,7 @@ def test_inspect(run_bitcoind):
# Invalid appointment, every field is empty
appointment_data = dict()
appointment = inspector.inspect(appointment_data)
assert (type(appointment) == tuple and appointment[0] != 0)
assert type(appointment) == tuple and appointment[0] != 0
# Valid appointment
locator = get_random_value_hex(32)
@@ -207,13 +207,25 @@ def test_inspect(run_bitcoind):
cipher = SUPPORTED_CIPHERS[0]
hash_function = SUPPORTED_HASH_FUNCTIONS[0]
appointment_data = {"locator": locator, "start_time": start_time, "end_time": end_time,
"dispute_delta": dispute_delta, "encrypted_blob": encrypted_blob, "cipher": cipher,
"hash_function": hash_function}
appointment_data = {
"locator": locator,
"start_time": start_time,
"end_time": end_time,
"dispute_delta": dispute_delta,
"encrypted_blob": encrypted_blob,
"cipher": cipher,
"hash_function": hash_function,
}
appointment = inspector.inspect(appointment_data)
assert(type(appointment) == Appointment and appointment.locator == locator and appointment.start_time == start_time
and appointment.end_time == end_time and appointment.dispute_delta == dispute_delta and
appointment.encrypted_blob.data == encrypted_blob and appointment.cipher == cipher and
appointment.hash_function == hash_function)
assert (
type(appointment) == Appointment
and appointment.locator == locator
and appointment.start_time == start_time
and appointment.end_time == end_time
and appointment.dispute_delta == dispute_delta
and appointment.encrypted_blob.data == encrypted_blob
and appointment.cipher == cipher
and appointment.hash_function == hash_function
)


@@ -31,12 +31,14 @@ def create_dummy_job_data(random_txid=False, justice_rawtx=None):
justice_txid = "f4184fc596403b9d638783cf57adfe4c75c605f6356fbc91338530e9831e9e16"
if justice_rawtx is None:
justice_rawtx = "0100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402" \
"204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4" \
"acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b" \
"13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1ba" \
"ded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482e" \
"cad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000"
justice_rawtx = (
"0100000001c997a5e56e104102fa209c6a852dd90660a20b2d9c352423edce25857fcd3704000000004847304402"
"204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4"
"acdd12909d831cc56cbbac4622082221a8768d1d0901ffffffff0200ca9a3b00000000434104ae1a62fe09c5f51b"
"13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1ba"
"ded5c72a704f7e6cd84cac00286bee0000000043410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482e"
"cad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b8643f656b412a3ac00000000"
)
else:
justice_txid = sha256d(justice_rawtx)
@@ -58,24 +60,34 @@ def test_job_init(run_bitcoind):
dispute_txid, justice_txid, justice_rawtx, appointment_end = create_dummy_job_data()
job = Job(dispute_txid, justice_txid, justice_rawtx, appointment_end)
assert job.dispute_txid == dispute_txid and job.justice_txid == justice_txid \
and job.justice_rawtx == justice_rawtx and job.appointment_end == appointment_end
assert (
job.dispute_txid == dispute_txid
and job.justice_txid == justice_txid
and job.justice_rawtx == justice_rawtx
and job.appointment_end == appointment_end
)
def test_job_to_dict():
job = create_dummy_job()
job_dict = job.to_dict()
assert job.locator == job_dict["locator"] and job.justice_rawtx == job_dict["justice_rawtx"] \
assert (
job.locator == job_dict["locator"]
and job.justice_rawtx == job_dict["justice_rawtx"]
and job.appointment_end == job_dict["appointment_end"]
)
def test_job_to_json():
job = create_dummy_job()
job_dict = json.loads(job.to_json())
assert job.locator == job_dict["locator"] and job.justice_rawtx == job_dict["justice_rawtx"] \
assert (
job.locator == job_dict["locator"]
and job.justice_rawtx == job_dict["justice_rawtx"]
and job.appointment_end == job_dict["appointment_end"]
)
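The two tests above compare the same fields through to_dict and to_json, which suggests, though the diff does not show it, that to_json is simply a JSON dump of to_dict. Under that assumption, a round-trip sketch:

# Round-trip sketch; assumes Job.to_json() == json.dumps(Job.to_dict()).
job = create_dummy_job()
assert json.loads(job.to_json()) == job.to_dict()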
def test_init_responder(responder):
@@ -97,8 +109,14 @@ def test_add_response(responder):
responder.asleep = False
# The block_hash passed to add_response does not matter much now. It will in the future to deal with errors
receipt = responder.add_response(uuid, job.dispute_txid, job.justice_txid, job.justice_rawtx, job.appointment_end,
block_hash=get_random_value_hex(32))
receipt = responder.add_response(
uuid,
job.dispute_txid,
job.justice_txid,
job.justice_rawtx,
job.appointment_end,
block_hash=get_random_value_hex(32),
)
assert receipt.delivered is True
@@ -124,9 +142,13 @@ def test_create_job(responder):
# Check that the rest of job data also matches
job = responder.jobs[uuid]
assert job.dispute_txid == dispute_txid and job.justice_txid == justice_txid \
and job.justice_rawtx == justice_rawtx and job.appointment_end == appointment_end \
assert (
job.dispute_txid == dispute_txid
and job.justice_txid == justice_txid
and job.justice_rawtx == justice_rawtx
and job.appointment_end == appointment_end
)
def test_create_job_already_confirmed(responder):
@@ -134,9 +156,10 @@ def test_create_job_already_confirmed(responder):
for i in range(20):
uuid = uuid4().hex
confirmations = i+1
confirmations = i + 1
dispute_txid, justice_txid, justice_rawtx, appointment_end = create_dummy_job_data(
justice_rawtx=TX.create_dummy_transaction())
justice_rawtx=TX.create_dummy_transaction()
)
responder.create_job(uuid, dispute_txid, justice_txid, justice_rawtx, appointment_end, confirmations)
@@ -218,7 +241,7 @@ def test_do_watch(responder):
def test_get_txs_to_rebroadcast(responder):
# Let's create a few fake txids and assign at least 6 missing confirmations to each
txs_missing_too_many_conf = {get_random_value_hex(32): 6+i for i in range(10)}
txs_missing_too_many_conf = {get_random_value_hex(32): 6 + i for i in range(10)}
# Let's create some other transactions that have missed some confirmations, but not that many
txs_missing_some_conf = {get_random_value_hex(32): 3 for _ in range(10)}
@@ -299,7 +322,8 @@ def test_rebroadcast(db_manager):
for i in range(20):
uuid = uuid4().hex
dispute_txid, justice_txid, justice_rawtx, appointment_end = create_dummy_job_data(
justice_rawtx=TX.create_dummy_transaction())
justice_rawtx=TX.create_dummy_transaction()
)
responder.jobs[uuid] = Job(dispute_txid, justice_txid, justice_rawtx, appointment_end)
responder.tx_job_map[justice_txid] = [uuid]
@@ -319,20 +343,3 @@ def test_rebroadcast(db_manager):
assert receipt.delivered is True
assert responder.missed_confirmations[txid] == 0


@@ -6,9 +6,9 @@ c_logger.disabled = True
def test_in_correct_network(run_bitcoind):
# The simulator runs as if it were regtest, so every other network should fail
assert in_correct_network('mainnet') is False
assert in_correct_network('testnet') is False
assert in_correct_network('regtest') is True
assert in_correct_network("mainnet") is False
assert in_correct_network("testnet") is False
assert in_correct_network("regtest") is True
def test_can_connect_to_bitcoind():
@@ -31,13 +31,13 @@ def test_bitcoin_cli():
def test_check_txid_format():
assert(check_txid_format(None) is False)
assert(check_txid_format("") is False)
assert(check_txid_format(0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef) is False) # wrong type
assert(check_txid_format("abcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcd") is True) # lowercase
assert(check_txid_format("ABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCD") is True) # uppercase
assert(check_txid_format("0123456789abcdef0123456789ABCDEF0123456789abcdef0123456789ABCDEF") is True) # mixed case
assert(check_txid_format("0123456789012345678901234567890123456789012345678901234567890123") is True) # only nums
assert(check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdf") is False) # too short
assert(check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0") is False) # too long
assert(check_txid_format("g123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef") is False) # non-hex
assert check_txid_format(None) is False
assert check_txid_format("") is False
assert check_txid_format(0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF) is False # wrong type
assert check_txid_format("abcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcd") is True # lowercase
assert check_txid_format("ABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCDEFABCD") is True # uppercase
assert check_txid_format("0123456789abcdef0123456789ABCDEF0123456789abcdef0123456789ABCDEF") is True # mixed case
assert check_txid_format("0123456789012345678901234567890123456789012345678901234567890123") is True # only nums
assert check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdf") is False # too short
assert check_txid_format("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0") is False # too long
assert check_txid_format("g123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef") is False # non-hex


@@ -15,8 +15,15 @@ from pisa.responder import Responder
from pisa.tools import check_txid_format
from pisa.utils.auth_proxy import AuthServiceProxy
from test.unit.conftest import generate_block, generate_blocks, generate_dummy_appointment
from pisa.conf import EXPIRY_DELTA, BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT, PISA_SECRET_KEY, \
MAX_APPOINTMENTS
from pisa.conf import (
EXPIRY_DELTA,
BTC_RPC_USER,
BTC_RPC_PASSWD,
BTC_RPC_HOST,
BTC_RPC_PORT,
PISA_SECRET_KEY,
MAX_APPOINTMENTS,
)
c_logger.disabled = True
@@ -42,8 +49,9 @@ def create_appointments(n):
dispute_txs = []
for i in range(n):
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
end_time_offset=END_TIME_OFFSET)
appointment, dispute_tx = generate_dummy_appointment(
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
)
uuid = uuid4().hex
appointments[uuid] = appointment
@@ -80,8 +88,9 @@ def test_add_appointment(run_bitcoind, watcher):
# We should be able to add appointments up to the limit
for _ in range(10):
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
end_time_offset=END_TIME_OFFSET)
appointment, dispute_tx = generate_dummy_appointment(
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
)
added_appointment, sig = watcher.add_appointment(appointment)
assert added_appointment is True
@@ -89,8 +98,7 @@ def test_add_appointment(run_bitcoind, watcher):
def test_sign_appointment(watcher):
appointment, _ = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
end_time_offset=END_TIME_OFFSET)
appointment, _ = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET)
signature = watcher.sign_appointment(appointment)
assert is_signature_valid(appointment, signature, public_key)
@@ -100,15 +108,17 @@ def test_add_too_many_appointments(watcher):
watcher.appointments = dict()
for _ in range(MAX_APPOINTMENTS):
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
end_time_offset=END_TIME_OFFSET)
appointment, dispute_tx = generate_dummy_appointment(
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
)
added_appointment, sig = watcher.add_appointment(appointment)
assert added_appointment is True
assert is_signature_valid(appointment, sig, public_key)
appointment, dispute_tx = generate_dummy_appointment(start_time_offset=START_TIME_OFFSET,
end_time_offset=END_TIME_OFFSET)
appointment, dispute_tx = generate_dummy_appointment(
start_time_offset=START_TIME_OFFSET, end_time_offset=END_TIME_OFFSET
)
added_appointment, sig = watcher.add_appointment(appointment)
assert added_appointment is False