Refactors test structure

This commit is contained in:
Sergi Delgado Segura
2019-10-04 17:29:13 +01:00
parent 1e18630ce2
commit beac88a2c5
9 changed files with 2 additions and 2 deletions

0
test/__init__.py Normal file
View File

View File

@@ -0,0 +1,121 @@
import os
import json
import requests
import time
from copy import deepcopy
from hashlib import sha256
from binascii import hexlify, unhexlify
from apps.cli.blob import Blob
from pisa import HOST, PORT
from pisa.utils.auth_proxy import AuthServiceProxy
from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT
PISA_API = "http://{}:{}".format(HOST, PORT)
def generate_dummy_appointment(dispute_txid):
    """Build a random appointment for ``dispute_txid``, anchored to the current block height.

    Queries the PISA API for the block count and returns a dict with the fields the
    API expects (locator, start/end times, dispute delta, encrypted blob, cipher and
    hash function).
    """
    response = requests.get(url=PISA_API + '/get_block_count', timeout=5)
    current_height = response.json().get("block_count")

    cipher = "AES-GCM-128"
    hash_function = "SHA256"

    # Random 32-byte hex "transaction" (the simulated backend treats txids as raw txs).
    dummy_tx = hexlify(os.urandom(32)).decode('utf-8')
    start_time = current_height + 5
    end_time = current_height + 10
    dispute_delta = 20

    # The locator is the SHA256 of the dispute txid; the blob is keyed with the txid too.
    locator = sha256(unhexlify(dispute_txid)).hexdigest()
    encrypted_blob = Blob(dummy_tx, cipher, hash_function).encrypt(dispute_txid, debug=False, logging=False)

    return {"locator": locator, "start_time": start_time, "end_time": end_time,
            "dispute_delta": dispute_delta, "encrypted_blob": encrypted_blob,
            "cipher": cipher, "hash_function": hash_function}
def test_add_appointment(appointment=None):
    """Send an appointment to PISA, fetch it back and check it is being watched.

    If no appointment is given, a random dummy one is generated.
    """
    if not appointment:
        dispute_txid = hexlify(os.urandom(32)).decode('utf-8')
        appointment = generate_dummy_appointment(dispute_txid)

    print("Sending appointment (locator: {}) to PISA".format(appointment.get("locator")))
    # NOTE(review): requests' json= already serializes, so this posts a JSON-encoded
    # *string* rather than a JSON object — presumably the server decodes twice; confirm.
    r = requests.post(url=PISA_API, json=json.dumps(appointment), timeout=5)
    assert r.status_code == 200 and r.reason == 'OK'
    print(r.content.decode())

    print("Requesting it back from PISA")
    r = requests.get(url=PISA_API + "/get_appointment?locator=" + appointment["locator"])
    assert r.status_code == 200 and r.reason == 'OK'

    received_appointments = json.loads(r.content)

    # Pop the status field so the rest can be compared against what we sent.
    statuses = [received.pop("status") for received in received_appointments]

    # What we sent must be among the stored appointments, and all must be watched.
    assert appointment in received_appointments
    assert all(status == "being_watched" for status in statuses)
def test_same_locator_multiple_appointments():
    """End-to-end test: several appointments sharing a locator must all be fulfilled.

    Sends the same appointment twice plus a copy with a later end time, broadcasts
    the dispute and checks the watcher handed all three jobs to the responder.
    Requires a running PISA instance and the bitcoind (simulator) backend.
    """
    dispute_txid = hexlify(os.urandom(32)).decode('utf-8')
    appointment = generate_dummy_appointment(dispute_txid)

    # Send it once
    test_add_appointment(appointment)
    time.sleep(0.5)

    # Try again with the same data
    print("Sending it again")
    test_add_appointment(appointment)
    time.sleep(0.5)

    # Try again with the same data but increasing the end time
    print("Sending once more")
    dup_appointment = deepcopy(appointment)
    dup_appointment["end_time"] += 1
    test_add_appointment(dup_appointment)

    print("Sleeping 5 sec")
    time.sleep(5)

    # Broadcast the dispute so PISA reacts to it on the next (simulated) block.
    bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT))

    print("Triggering PISA with dispute tx")
    bitcoin_cli.sendrawtransaction(dispute_txid)

    print("Sleeping 10 sec (waiting for a new block)")
    time.sleep(10)

    print("Getting all appointments")
    r = requests.get(url=PISA_API + "/get_all_appointments")
    assert (r.status_code == 200 and r.reason == 'OK')

    received_appointments = json.loads(r.content)

    # Make sure there is no pending instance of the locator in the watcher
    watcher_locators = [appointment["locator"] for appointment in received_appointments["watcher_appointments"]]
    assert(appointment["locator"] not in watcher_locators)

    # Make sure all the appointments went through (two duplicates + the extended copy)
    target_jobs = [v for k, v in received_appointments["responder_jobs"].items() if v["locator"] ==
                   appointment["locator"]]
    assert (len(target_jobs) == 3)
if __name__ == '__main__':
    # Entry point: run the end-to-end test against a live PISA instance.
    test_same_locator_multiple_appointments()
    print("All good!")

139
test/appointment_tests.py Normal file
View File

@@ -0,0 +1,139 @@
import logging
from pisa.inspector import Inspector
from pisa.appointment import Appointment
from pisa import errors
from pisa.utils.authproxy import AuthServiceProxy, JSONRPCException
from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT, SUPPORTED_HASH_FUNCTIONS, \
SUPPORTED_CIPHERS, TEST_LOG_FILE
# Send log records to the test log file so they do not interleave with test output.
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO, handlers=[
    logging.FileHandler(TEST_LOG_FILE)
])
# Appointment template: every field starts invalid (None); the loops below cycle
# each field through invalid values and then pin it to a valid one.
appointment = {"locator": None, "start_time": None, "end_time": None, "dispute_delta": None,
               "encrypted_blob": None, "cipher": None, "hash_function": None}

bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST,
                                                       BTC_RPC_PORT))

try:
    block_height = bitcoin_cli.getblockcount()

except JSONRPCException as e:
    # NOTE(review): if this fails, block_height stays undefined and the vectors
    # below raise NameError — consider aborting here instead of just logging.
    logging.error("[Inspector] JSONRPCException. Error code {}".format(e))

# Invalid inputs per field: empty, wrong type, wrong size/format or out-of-range values.
locators = [None, 0, 'A' * 31, "A" * 63 + "_"]
start_times = [None, 0, '', 15.0, block_height - 10]
end_times = [None, 0, '', 26.123, block_height - 11]
dispute_deltas = [None, 0, '', 1.2, -3, 30]
encrypted_blobs = [None, 0, '']
ciphers = [None, 0, '', 'foo']
hash_functions = [None, 0, '', 'foo']

# Expected inspector error code for each invalid input above (matched pairwise via zip).
# NOTE(review): dispute_deltas has 6 values but dispute_delta_rets only 5 — zip
# silently drops the last value (30); confirm whether 30 was meant to be tested.
locators_rets = [errors.APPOINTMENT_EMPTY_FIELD, errors.APPOINTMENT_WRONG_FIELD_TYPE,
                 errors.APPOINTMENT_WRONG_FIELD_SIZE, errors.APPOINTMENT_WRONG_FIELD_FORMAT]
start_time_rets = [errors.APPOINTMENT_EMPTY_FIELD, errors.APPOINTMENT_FIELD_TOO_SMALL,
                   errors.APPOINTMENT_WRONG_FIELD_TYPE, errors.APPOINTMENT_WRONG_FIELD_TYPE,
                   errors.APPOINTMENT_FIELD_TOO_SMALL]
end_time_rets = [errors.APPOINTMENT_EMPTY_FIELD, errors.APPOINTMENT_FIELD_TOO_SMALL,
                 errors.APPOINTMENT_WRONG_FIELD_TYPE, errors.APPOINTMENT_WRONG_FIELD_TYPE,
                 errors.APPOINTMENT_FIELD_TOO_SMALL]
dispute_delta_rets = [errors.APPOINTMENT_EMPTY_FIELD, errors.APPOINTMENT_FIELD_TOO_SMALL,
                      errors.APPOINTMENT_WRONG_FIELD_TYPE, errors.APPOINTMENT_WRONG_FIELD_TYPE,
                      errors.APPOINTMENT_FIELD_TOO_SMALL]
encrypted_blob_rets = [errors.APPOINTMENT_EMPTY_FIELD, errors.APPOINTMENT_WRONG_FIELD_TYPE,
                       errors.APPOINTMENT_WRONG_FIELD]
cipher_rets = [errors.APPOINTMENT_EMPTY_FIELD, errors.APPOINTMENT_WRONG_FIELD_TYPE,
               errors.APPOINTMENT_CIPHER_NOT_SUPPORTED, errors.APPOINTMENT_CIPHER_NOT_SUPPORTED]
hash_function_rets = [errors.APPOINTMENT_EMPTY_FIELD, errors.APPOINTMENT_WRONG_FIELD_TYPE,
                      errors.APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED, errors.APPOINTMENT_HASH_FUNCTION_NOT_SUPPORTED]

inspector = Inspector(debug=True, logging=logging)
# Each loop feeds one field's invalid values through the inspector, checks the
# returned error code, then pins that field to a valid value before moving on.
print("Locator tests\n")
for locator, ret in zip(locators, locators_rets):
    appointment["locator"] = locator
    r = inspector.inspect(appointment)
    assert r[0] == ret
    print(r)

# Set locator to a 'valid' one
appointment['locator'] = 'A' * 64

print("\nStart time tests\n")
for start_time, ret in zip(start_times, start_time_rets):
    appointment["start_time"] = start_time
    r = inspector.inspect(appointment)
    assert r[0] == ret
    print(r)

# Setting the start time to some time in the future
appointment['start_time'] = block_height + 10

print("\nEnd time tests\n")
for end_time, ret in zip(end_times, end_time_rets):
    appointment["end_time"] = end_time
    r = inspector.inspect(appointment)
    assert r[0] == ret
    print(r)

# Setting the end time to something consistent with the start time
appointment['end_time'] = block_height + 30

print("\nDelta tests\n")
for dispute_delta, ret in zip(dispute_deltas, dispute_delta_rets):
    appointment["dispute_delta"] = dispute_delta
    r = inspector.inspect(appointment)
    assert r[0] == ret
    print(r)

# Setting a proper dispute delta
appointment['dispute_delta'] = appointment['end_time'] - appointment['start_time']

print("\nEncrypted blob tests\n")
for encrypted_blob, ret in zip(encrypted_blobs, encrypted_blob_rets):
    appointment["encrypted_blob"] = encrypted_blob
    r = inspector.inspect(appointment)
    assert r[0] == ret
    print(r)

# Setting the encrypted blob to something that may pass
appointment['encrypted_blob'] = 'A' * 32

print("\nCipher tests\n")
for cipher, ret in zip(ciphers, cipher_rets):
    appointment["cipher"] = cipher
    r = inspector.inspect(appointment)
    assert r[0] == ret
    print(r)

# Setting the cipher to the only supported one for now
appointment['cipher'] = SUPPORTED_CIPHERS[0]

print("\nHash function tests\n")
for hash_function, ret in zip(hash_functions, hash_function_rets):
    appointment["hash_function"] = hash_function
    r = inspector.inspect(appointment)
    assert r[0] == ret
    print(r)

# Setting the hash function to the only supported one for now
appointment['hash_function'] = SUPPORTED_HASH_FUNCTIONS[0]

# With every field valid, inspect must return an Appointment instance.
r = inspector.inspect(appointment)
assert type(r) == Appointment

print("\nAll tests passed!")

View File

View File

@@ -0,0 +1,109 @@
import os
import binascii
from pisa.utils.authproxy import AuthServiceProxy, JSONRPCException
from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT
from pisa.tools import check_txid_format

# Smoke tests for the bitcoind simulator's JSON-RPC interface.
# Requires the simulator to be running on the configured host/port.
bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT))

# Help should always return 0
assert(bitcoin_cli.help() == 0)

# getblockhash should return a block id (which matches the txid format)
block_hash = bitcoin_cli.getblockhash(0)
assert(check_txid_format(block_hash))

# Check that the values are within range and of the proper format (all should fail)
values = [-1, 500, None, '', '111', [], 1.1]
print("getblockhash fails ({}):".format(len(values)))
for v in values:
    try:
        block_hash = bitcoin_cli.getblockhash(v)
        assert False
    except JSONRPCException as e:
        print('\t{}'.format(e))

# getblock should return a list of transactions and the height
block = bitcoin_cli.getblock(block_hash)
assert(isinstance(block.get('tx'), list))
assert(len(block.get('tx')) != 0)
assert(isinstance(block.get('height'), int))

# Some fails: well-formed but unknown block hashes, plus the bad values above
values += ["a"*64, binascii.hexlify(os.urandom(32)).decode()]
print("\ngetblock fails ({}):".format(len(values)))
for v in values:
    try:
        block = bitcoin_cli.getblock(v)
        assert False
    except JSONRPCException as e:
        print('\t{}'.format(e))

# decoderawtransaction should only return if the given transaction matches a txid format
coinbase_tx = block.get('tx')[0]
tx = bitcoin_cli.decoderawtransaction(coinbase_tx)
assert(isinstance(tx, dict))
assert(isinstance(tx.get('txid'), str))
assert(check_txid_format(tx.get('txid')))

# Therefore it should also work for a random properly-formatted 32-byte hex in our simulation
random_tx = binascii.hexlify(os.urandom(32)).decode()
tx = bitcoin_cli.decoderawtransaction(random_tx)
assert(isinstance(tx, dict))
assert(isinstance(tx.get('txid'), str))
assert(check_txid_format(tx.get('txid')))

# But it should fail for a non-properly-formatted one
values = [1, None, '', "a"*63, "b"*65, [], binascii.hexlify(os.urandom(31)).hex()]
print("\ndecoderawtransaction fails ({}):".format(len(values)))
for v in values:
    try:
        block = bitcoin_cli.decoderawtransaction(v)
        assert False
    except JSONRPCException as e:
        print('\t{}'.format(e))

# sendrawtransaction should only allow txids that the simulator has not mined yet
bitcoin_cli.sendrawtransaction(binascii.hexlify(os.urandom(32)).decode())

# Any data not matching the txid format, or an already mined transaction, should fail
values += [coinbase_tx]
print("\nsendrawtransaction fails ({}):".format(len(values)))
for v in values:
    try:
        block = bitcoin_cli.sendrawtransaction(v)
        assert False
    except JSONRPCException as e:
        print('\t{}'.format(e))

# getrawtransaction should work for existing transactions, and fail for non-existing ones
tx = bitcoin_cli.getrawtransaction(coinbase_tx)
assert(isinstance(tx, dict))
assert(isinstance(tx.get('confirmations'), int))

# FIX: this block previously re-ran the sendrawtransaction failures (copy-paste);
# it now exercises getrawtransaction. The mined coinbase tx is excluded since
# getrawtransaction succeeds for it.
getraw_fail_values = [v for v in values if v != coinbase_tx]
print("\ngetrawtransaction fails ({}):".format(len(getraw_fail_values)))
for v in getraw_fail_values:
    try:
        tx = bitcoin_cli.getrawtransaction(v)
        assert False
    except JSONRPCException as e:
        print('\t{}'.format(e))

# getblockcount should always return a non-negative integer
bc = bitcoin_cli.getblockcount()
assert (isinstance(bc, int))
assert (bc >= 0)

print("\nAll tests passed!")

View File

@@ -0,0 +1,214 @@
from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT
from flask import Flask, request, Response, abort
from test.simulator.zmq_publisher import ZMQPublisher
from threading import Thread
from pisa.rpc_errors import *
from pisa.tools import check_txid_format
import logging
import binascii
import json
import os
import time
app = Flask(__name__)

# Address the simulated bitcoind RPC server listens on (18443 is bitcoind's regtest RPC port).
HOST = 'localhost'
PORT = '18443'
@app.route('/', methods=['POST'])
def process_request():
    """
    process_request simulates the bitcoin-rpc server run by bitcoind. The available commands are limited to the ones
    we'll need to use in pisa. The model we will be using is pretty simplified to reduce the complexity of simulating
    bitcoind:

    Raw transactions:       raw transactions will actually be transaction ids (txids). Pisa will, therefore, receive
                            encrypted blobs that encrypt ids instead of real transactions.

    decoderawtransaction:   querying for the decoding of a raw transaction will return a dictionary with a single
                            field: "txid", which will match with the txid provided in the request.

    sendrawtransaction:     sending a rawtransaction will notify our mining simulator to include such transaction in a
                            subsequent block.

    getrawtransaction:      requesting a rawtransaction from a txid will return a dictionary containing a single field:
                            "confirmations", since rawtransactions are only queried to check whether a transaction has
                            made it to a block or not.

    getblockcount:          the block count will be got from the mining simulator by querying how many blocks have been
                            emitted so far.

    getblock:               querying for a block will return a dictionary with three fields: "tx" representing a list
                            of transactions, "height" representing the block height and "hash" representing the block
                            hash. All of them will be got from the mining simulator.

    getblockhash:           a block hash is only queried by pisad on bootstrapping to check the network bitcoind is
                            running on.

    help:                   help is only used as a sample command to test if bitcoind is running when bootstrapping
                            pisad. It will return a 200/OK with no data.
    """

    global mempool
    request_data = request.get_json()
    method = request_data.get('method')

    # JSON-RPC envelope; "error" stays None on success.
    response = {"id": 0, "result": 0, "error": None}
    no_param_err = {"code": RPC_MISC_ERROR, "message": "JSON value is not a {} as expected"}

    if method == "decoderawtransaction":
        txid = get_param(request_data)

        if isinstance(txid, str):
            if check_txid_format(txid):
                # "Decoding" just echoes the txid back (txids stand in for raw txs).
                response["result"] = {"txid": txid}
            else:
                response["error"] = {"code": RPC_DESERIALIZATION_ERROR, "message": "TX decode failed"}
        else:
            response["error"] = no_param_err
            response["error"]["message"] = response["error"]["message"].format("string")

    elif method == "sendrawtransaction":
        # TODO: A way of rejecting transactions should be added to test edge cases.
        txid = get_param(request_data)

        if isinstance(txid, str):
            if check_txid_format(txid):
                # Membership test directly on the dict (no need to materialize keys()).
                if txid not in mined_transactions:
                    mempool.append(txid)
                else:
                    response["error"] = {"code": RPC_VERIFY_ALREADY_IN_CHAIN,
                                         "message": "Transaction already in block chain"}
            else:
                response["error"] = {"code": RPC_DESERIALIZATION_ERROR, "message": "TX decode failed"}
        else:
            response["error"] = no_param_err
            response["error"]["message"] = response["error"]["message"].format("string")

    elif method == "getrawtransaction":
        txid = get_param(request_data)

        if isinstance(txid, str):
            block = blocks.get(mined_transactions.get(txid))

            if block:
                # Confirmations: blocks mined on top of (and including) the tx's block.
                response["result"] = {"confirmations": len(blockchain) - block.get('height')}
            elif txid in mempool:
                response["result"] = {"confirmations": 0}
            else:
                response["error"] = {'code': RPC_INVALID_ADDRESS_OR_KEY,
                                     'message': 'No such mempool or blockchain transaction. Use gettransaction for '
                                                'wallet transactions.'}
        else:
            response["error"] = no_param_err
            response["error"]["message"] = response["error"]["message"].format("string")

    elif method == "getblockcount":
        response["result"] = len(blockchain)

    elif method == "getblock":
        blockid = get_param(request_data)

        if isinstance(blockid, str):
            block = blocks.get(blockid)

            if block:
                block["hash"] = blockid
                response["result"] = block
            else:
                response["error"] = {"code": RPC_INVALID_ADDRESS_OR_KEY, "message": "Block not found"}
        else:
            response["error"] = no_param_err
            response["error"]["message"] = response["error"]["message"].format("string")

    elif method == "getblockhash":
        height = get_param(request_data)

        if isinstance(height, int):
            # FIX: upper bound must be exclusive — blockchain holds len(blockchain)
            # hashes at indices 0..len-1, so height == len(blockchain) would raise
            # IndexError (HTTP 500) instead of the JSON-RPC "out of range" error.
            if 0 <= height < len(blockchain):
                response["result"] = blockchain[height]
            else:
                response["error"] = {"code": RPC_INVALID_PARAMETER, "message": "Block height out of range"}
        else:
            response["error"] = no_param_err
            response["error"]["message"] = response["error"]["message"].format("integer")

    elif method == "help":
        pass

    else:
        return abort(404, "Method not found")

    return Response(json.dumps(response), status=200, mimetype='application/json')
def get_param(request_data):
    """Return the first entry of the request's "params" list, or None if absent or empty."""
    params = request_data.get("params")

    if isinstance(params, list) and params:
        return params[0]

    return None
def load_data():
    # Placeholder: persisting/restoring simulator state is not implemented yet.
    pass
def simulate_mining():
    """Mine a simulated block every 10 seconds.

    Each block gets a random hash and coinbase txid, drains up to 99 mempool txs
    (100 total with the coinbase), records the block in the shared state and
    publishes the block hash through the ZMQ feed. Runs forever; meant to be
    started in a daemon-ish thread from __main__.
    """
    global mempool, mined_transactions, blocks, blockchain
    prev_block_hash = None

    while True:
        block_hash = binascii.hexlify(os.urandom(32)).decode('utf-8')
        coinbase_tx_hash = binascii.hexlify(os.urandom(32)).decode('utf-8')

        txs_to_mine = [coinbase_tx_hash]

        if len(mempool) != 0:
            # We'll mine up to 100 txs per block (99 from the mempool + the coinbase)
            txs_to_mine += mempool[:99]
            mempool = mempool[99:]

        # Keep track of the mined transactions (to respond to getrawtransaction)
        for tx in txs_to_mine:
            mined_transactions[tx] = block_hash

        blocks[block_hash] = {"tx": txs_to_mine, "height": len(blockchain), "previousblockhash": prev_block_hash}

        # Notify subscribers (pisa) of the new block, then extend the chain.
        mining_simulator.publish_data(binascii.unhexlify(block_hash))
        blockchain.append(block_hash)
        prev_block_hash = block_hash

        print("New block mined: {}".format(block_hash))
        print("\tTransactions: {}".format(txs_to_mine))

        time.sleep(10)
if __name__ == '__main__':
    # ZMQ feed pisa subscribes to for new block hashes (mimics bitcoind's zmq feed).
    mining_simulator = ZMQPublisher(topic=b'hashblock', feed_protocol=FEED_PROTOCOL, feed_addr=FEED_ADDR,
                                    feed_port=FEED_PORT)

    # Shared chain state: written by simulate_mining, read by the RPC handlers.
    mempool = []
    mined_transactions = {}
    blocks = {}
    blockchain = []

    mining_thread = Thread(target=simulate_mining)
    mining_thread.start()

    # Setting Flask log to ERROR only so it does not mess with our logging
    logging.getLogger('werkzeug').setLevel(logging.ERROR)

    app.run(host=HOST, port=PORT)

View File

@@ -0,0 +1,12 @@
import zmq
class ZMQPublisher:
    """Minimal ZMQ PUB socket wrapper used by the simulator to mimic bitcoind's block feed."""

    def __init__(self, topic, feed_protocol, feed_addr, feed_port):
        self.topic = topic
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUB)

        endpoint = "{}://{}:{}".format(feed_protocol, feed_addr, feed_port)
        self.socket.bind(endpoint)

    def publish_data(self, data):
        """Send data to subscribers under the configured topic."""
        self.socket.send_multipart([self.topic, data])

0
test/unit/__init__.py Normal file
View File

87
test/unit/test_cleaner.py Normal file
View File

@@ -0,0 +1,87 @@
import random
from os import urandom
from uuid import uuid4
from binascii import hexlify
from pisa import logging
from pisa.responder import Job
from pisa.cleaner import Cleaner
from pisa.appointment import Appointment
# Test parameters: confirmations attached to completed jobs, number of items
# deleted per run, total items set up, and how many times each test is repeated.
# NOTE(review): CONFIRMATIONS is defined but test_delete_completed_jobs below
# hard-codes 6 — they should agree.
CONFIRMATIONS = 6
ITEMS = 10
MAX_ITEMS = 100
ITERATIONS = 1000
def set_up_appointments(total_appointments):
    """Create ``total_appointments`` random appointments and a locator -> [uuid] map.

    A coin toss gives some locators more than one uuid, mimicking duplicate
    appointments for the same dispute.
    """
    appointments = {}
    locator_uuid_map = {}

    for _ in range(total_appointments):
        locator = hexlify(urandom(64))

        uuids = [uuid4().hex]
        # Coin toss: each locator may back several appointments.
        while random.randint(0, 1):
            uuids.append(uuid4().hex)

        for uuid in uuids:
            appointments[uuid] = Appointment(locator, None, None, None, None, None, None)

        locator_uuid_map[locator] = uuids

    return appointments, locator_uuid_map
def set_up_jobs(total_jobs):
    """Create ``total_jobs`` random jobs and a justice_txid -> [uuid] map.

    A coin toss gives some txids more than one uuid, mimicking several jobs
    backed by the same justice transaction.
    """
    jobs = {}
    tx_job_map = {}

    for _ in range(total_jobs):
        txid = hexlify(urandom(64))

        uuids = [uuid4().hex]
        # Coin toss: each justice_txid may back several jobs.
        while random.randint(0, 1):
            uuids.append(uuid4().hex)

        for uuid in uuids:
            # Assign both justice_txid and dispute_txid the same id (it shouldn't matter)
            jobs[uuid] = Job(txid, txid, None, None, None)

        tx_job_map[txid] = uuids

    return jobs, tx_job_map
def test_delete_expired_appointment():
    """Check that Cleaner.delete_expired_appointment removes every expired appointment.

    Sets up MAX_ITEMS appointments, marks ITEMS of them as expired and verifies
    none of the expired uuids survive the cleanup.
    """
    appointments, locator_uuid_map = set_up_appointments(MAX_ITEMS)
    expired_appointments = random.sample(list(appointments.keys()), k=ITEMS)

    Cleaner.delete_expired_appointment(expired_appointments, appointments, locator_uuid_map)

    # FIX: the previous `not issubset` assertion passed as soon as a single
    # expired uuid was gone; require that *none* of them remain.
    assert not set(expired_appointments) & appointments.keys()
def test_delete_completed_jobs():
    """Check that Cleaner.delete_completed_jobs removes every completed job.

    Sets up MAX_ITEMS jobs, marks ITEMS of them as completed (with CONFIRMATIONS
    confirmations) and verifies none of the completed uuids survive the cleanup.
    """
    jobs, tx_job_map = set_up_jobs(MAX_ITEMS)
    selected_jobs = random.sample(list(jobs.keys()), k=ITEMS)

    # Use the module constant instead of a hard-coded 6.
    completed_jobs = [(job, CONFIRMATIONS) for job in selected_jobs]

    Cleaner.delete_completed_jobs(jobs, tx_job_map, completed_jobs, 0)

    # FIX: the previous assertion compared (uuid, confirmations) tuples against the
    # uuid keys of `jobs`, so it was vacuously true. Compare the uuids themselves
    # and require that none of them remain.
    assert not set(selected_jobs) & jobs.keys()
# Silence logging (the Cleaner logs deletions) and repeat each randomized test
# many times so the coin-toss set-up covers a wide range of shapes.
logging.getLogger().disabled = True

for _ in range(ITERATIONS):
    test_delete_expired_appointment()

for _ in range(ITERATIONS):
    test_delete_completed_jobs()