Merge pull request #81 from sr-gi/chainmaester

Adds basic logic to detect forks while offline
This commit is contained in:
Sergi Delgado Segura
2020-01-08 09:55:39 +01:00
committed by GitHub
14 changed files with 177 additions and 809 deletions

View File

@@ -39,6 +39,17 @@ jobs:
- ./venv
key: v1-dependencies-{{ checksum "pisa/requirements.txt" }}
# Get GitHub dependencies (pending to be added to PyPI)
- run:
name: get bitcoind mock
command: |
git clone git@github.com:sr-gi/bitcoind_mock.git
. venv/bin/activate
pip install -r bitcoind_mock/requirements.txt
cp bitcoind_mock/bitcoind_mock/sample_conf.py bitcoind_mock/bitcoind_mock/conf.py
mv bitcoind_mock/bitcoind_mock ~/repo/venv/lib/python3.6/site-packages
# run tests!
# this example uses Django's built-in test-runner
# other common Python testing frameworks include pytest and nose

View File

@@ -98,35 +98,8 @@ class BlockProcessor:
return tx
def get_missed_blocks(self, last_know_block_hash):
"""
Compute the blocks between the current best chain tip and a given block hash (``last_know_block_hash``).
This method is used to fetch all the missed information when recovering from a crash. Note that if the two
blocks are not part of the same chain, it would return all the blocks up to genesis.
Args:
last_know_block_hash (:obj:`str`): the hash of the last known block.
Returns:
:obj:`list`: A list of block hashes between the given block and the current best chain tip, starting from the
child of ``last_know_block_hash``.
"""
# FIXME: This needs to be integrated with the ChainMaester (soon TM) to allow dealing with forks.
current_block_hash = self.get_best_block_hash()
missed_blocks = []
while current_block_hash != last_know_block_hash and current_block_hash is not None:
missed_blocks.append(current_block_hash)
current_block = self.get_block(current_block_hash)
current_block_hash = current_block.get("previousblockhash")
return missed_blocks[::-1]
def get_distance_to_tip(self, target_block_hash):
@staticmethod
def get_distance_to_tip(target_block_hash):
"""
Compute the distance between a given block hash and the best chain tip.
@@ -142,10 +115,10 @@ class BlockProcessor:
distance = None
chain_tip = self.get_best_block_hash()
chain_tip_height = self.get_block(chain_tip).get("height")
chain_tip = BlockProcessor.get_best_block_hash()
chain_tip_height = BlockProcessor.get_block(chain_tip).get("height")
target_block = self.get_block(target_block_hash)
target_block = BlockProcessor.get_block(target_block_hash)
if target_block is not None:
target_block_height = target_block.get("height")
@@ -153,3 +126,85 @@ class BlockProcessor:
distance = chain_tip_height - target_block_height
return distance
@staticmethod
def get_missed_blocks(last_know_block_hash):
"""
Compute the blocks between the current best chain tip and a given block hash (``last_know_block_hash``).
This method is used to fetch all the missed information when recovering from a crash.
Args:
last_know_block_hash (:obj:`str`): the hash of the last known block.
Returns:
:obj:`list`: A list of block hashes between the given block and the current best chain tip, starting from the
child of ``last_know_block_hash``.
"""
current_block_hash = BlockProcessor.get_best_block_hash()
missed_blocks = []
while current_block_hash != last_know_block_hash and current_block_hash is not None:
missed_blocks.append(current_block_hash)
current_block = BlockProcessor.get_block(current_block_hash)
current_block_hash = current_block.get("previousblockhash")
return missed_blocks[::-1]
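As an aside, the ordering of the returned list can be illustrated with a toy chain; a minimal, self-contained sketch (hypothetical hashes, with a plain dict standing in for bitcoind's getblock rather than the BlockProcessor itself):
# Chain: hash_a <- hash_b <- hash_c <- hash_d, where hash_d is the current best tip.
chain = {
    "hash_d": {"previousblockhash": "hash_c"},
    "hash_c": {"previousblockhash": "hash_b"},
    "hash_b": {"previousblockhash": "hash_a"},
}
best_tip, last_known = "hash_d", "hash_b"
# Same walk-back as get_missed_blocks: collect hashes from the tip down to the last known block.
missed, current = [], best_tip
while current != last_known and current is not None:
    missed.append(current)
    current = chain.get(current, {}).get("previousblockhash")
# Reversing gives oldest-first order, starting from the child of the last known block.
assert missed[::-1] == ["hash_c", "hash_d"]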
@staticmethod
def is_block_in_best_chain(block_hash):
"""
Checks whether a given block is part of the best chain. Blocks are identified by ``block_hash``.
A block that is not in the best chain will either not exist (block = None) or have a confirmation count of
-1 (implying that the block was forked out, or that the chain never grew from it).
Args:
block_hash (:obj:`str`): the hash of the block to be checked.
Returns:
:obj:`bool`: ``True`` if the block is on the best chain, ``False`` otherwise.
Raises:
KeyError: If the block cannot be found in the blockchain.
"""
block = BlockProcessor.get_block(block_hash)
if block is None:
# This should never happen as long as we are using the same node, since bitcoind never drops orphan blocks
# and we have received this block from our node at some point.
raise KeyError("Block not found")
if block.get("confirmations") != -1:
return True
else:
return False
@staticmethod
def find_last_common_ancestor(last_known_block_hash):
"""
Finds the last common ancestor between the current best chain tip and our last known block (an older block).
This is useful to recover from a chain fork that happened while we were offline (due to a crash or shutdown).
Args:
last_known_block_hash (:obj:`str`): the hash of the last known block.
Returns:
:obj:`tuple`: A tuple (:obj:`str`, :obj:`list`) where the first item contains the hash of the last common
ancestor and the second item contains the list of transactions included in the blocks from
``last_known_block_hash`` back to (but not including) the last common ancestor.
"""
target_block_hash = last_known_block_hash
dropped_txs = []
while not BlockProcessor.is_block_in_best_chain(target_block_hash):
block = BlockProcessor.get_block(target_block_hash)
dropped_txs.extend(block.get("tx"))
target_block_hash = block.get("previousblockhash")
return target_block_hash, dropped_txs
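Taken together, these helpers sketch the offline-fork recovery path that pisad now follows on bootstrap (find_last_common_ancestor relies on is_block_in_best_chain internally): rewind the last known block to the last common ancestor, then replay the missed blocks. A minimal sketch of how a caller might combine them, assuming a reachable bitcoind (or the bitcoind mock) behind BlockProcessor; the wrapper function itself is hypothetical:
from pisa.block_processor import BlockProcessor

def recover_from_offline_period(last_known_block_hash):
    # Rewind to the last common ancestor (a no-op if the block is still in the best chain)
    # and collect the transactions included in any dropped blocks.
    ancestor, dropped_txs = BlockProcessor.find_last_common_ancestor(last_known_block_hash)
    # Replay everything between the (possibly rewound) block and the current tip.
    missed_blocks = BlockProcessor.get_missed_blocks(ancestor)
    return missed_blocks, dropped_txs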

View File

@@ -64,20 +64,37 @@ if __name__ == "__main__":
last_block_watcher = db_manager.load_last_block_hash_watcher()
last_block_responder = db_manager.load_last_block_hash_responder()
missed_blocks_watcher = block_processor.get_missed_blocks(last_block_watcher)
missed_blocks_responder = (
missed_blocks_watcher
if last_block_watcher == last_block_responder
else block_processor.get_missed_blocks(last_block_responder)
)
# FIXME: 32-reorgs-offline dropped txs are not used at this point.
responder = Responder(db_manager)
responder.trackers, responder.tx_tracker_map = Builder.build_trackers(responder_trackers_data)
responder.block_queue = Builder.build_block_queue(missed_blocks_responder)
last_common_ancestor_responder = None
missed_blocks_responder = None
# Build Responder with backed up data if found
if last_block_responder is not None:
last_common_ancestor_responder, dropped_txs_responder = block_processor.find_last_common_ancestor(
last_block_responder
)
missed_blocks_responder = block_processor.get_missed_blocks(last_common_ancestor_responder)
responder.trackers, responder.tx_tracker_map = Builder.build_trackers(responder_trackers_data)
responder.block_queue = Builder.build_block_queue(missed_blocks_responder)
# Build Watcher with the Responder and backed-up data. If the last known blocks of both match, we don't
# perform the search twice.
watcher.responder = responder
watcher.appointments, watcher.locator_uuid_map = Builder.build_appointments(watcher_appointments_data)
watcher.block_queue = Builder.build_block_queue(missed_blocks_watcher)
if last_block_watcher is not None:
if last_block_watcher == last_block_responder:
missed_blocks_watcher = missed_blocks_responder
else:
last_common_ancestor_watcher, dropped_txs_watcher = block_processor.find_last_common_ancestor(
last_block_watcher
)
missed_blocks_watcher = block_processor.get_missed_blocks(last_common_ancestor_watcher)
watcher.appointments, watcher.locator_uuid_map = Builder.build_appointments(
watcher_appointments_data
)
watcher.block_queue = Builder.build_block_queue(missed_blocks_watcher)
# Fire the API
API(watcher).start()

View File

@@ -17,9 +17,10 @@ from pisa.tools import bitcoin_cli
from pisa.db_manager import DBManager
from common.appointment import Appointment
from test.simulator.utils import sha256d
from test.simulator.transaction import TX
from test.simulator.bitcoind_sim import run_simulator, HOST, PORT
from bitcoind_mock.utils import sha256d
from bitcoind_mock.transaction import TX
from bitcoind_mock.bitcoind import BitcoindMock
from bitcoind_mock.conf import BTC_RPC_HOST, BTC_RPC_PORT
from common.constants import LOCATOR_LEN_HEX
from common.cryptographer import Cryptographer
@@ -27,7 +28,7 @@ from common.cryptographer import Cryptographer
@pytest.fixture(scope="session")
def run_bitcoind():
bitcoind_thread = Thread(target=run_simulator, kwargs={"mode": "event", "verbose": False})
bitcoind_thread = Thread(target=BitcoindMock().run, kwargs={"mode": "event", "verbose": True})
bitcoind_thread.daemon = True
bitcoind_thread.start()
@@ -63,7 +64,7 @@ def get_random_value_hex(nbytes):
def generate_block():
requests.post(url="http://{}:{}/generate".format(HOST, PORT), timeout=5)
requests.post(url="http://{}:{}/generate".format(BTC_RPC_HOST, BTC_RPC_PORT), timeout=5)
sleep(0.5)
@@ -72,6 +73,11 @@ def generate_blocks(n):
generate_block()
def fork(block_hash):
fork_endpoint = "http://{}:{}/fork".format(BTC_RPC_HOST, BTC_RPC_PORT)
requests.post(fork_endpoint, json={"parent": block_hash})
def generate_dummy_appointment_data(real_height=True, start_time_offset=5, end_time_offset=30):
if real_height:
current_height = bitcoin_cli().getblockcount()

View File

@@ -1,6 +1,5 @@
from binascii import unhexlify
from pisa import c_logger
from apps.cli.blob import Blob
from test.pisa.unit.conftest import get_random_value_hex

View File

@@ -1,8 +1,7 @@
import pytest
from pisa import c_logger
from pisa.block_processor import BlockProcessor
from test.pisa.unit.conftest import get_random_value_hex, generate_block, generate_blocks
from test.pisa.unit.conftest import get_random_value_hex, generate_block, generate_blocks, fork
hex_tx = (
@@ -57,8 +56,7 @@ def test_decode_raw_transaction_invalid():
def test_get_missed_blocks():
block_processor = BlockProcessor()
target_block = block_processor.get_best_block_hash()
target_block = BlockProcessor.get_best_block_hash()
# Generate some blocks and store the hash in a list
missed_blocks = []
@@ -67,24 +65,52 @@ def test_get_missed_blocks():
missed_blocks.append(BlockProcessor.get_best_block_hash())
# Check what we've missed
assert block_processor.get_missed_blocks(target_block) == missed_blocks
assert BlockProcessor.get_missed_blocks(target_block) == missed_blocks
# We can see how it does not work if we replace the target with the first element of the list
block_tip = missed_blocks[0]
assert block_processor.get_missed_blocks(block_tip) != missed_blocks
assert BlockProcessor.get_missed_blocks(block_tip) != missed_blocks
# But it works again if we skip that block
assert block_processor.get_missed_blocks(block_tip) == missed_blocks[1:]
assert BlockProcessor.get_missed_blocks(block_tip) == missed_blocks[1:]
def test_get_distance_to_tip():
target_distance = 5
block_processor = BlockProcessor()
target_block = block_processor.get_best_block_hash()
target_block = BlockProcessor.get_best_block_hash()
# Mine some blocks up to the target distance
generate_blocks(target_distance)
# Check if the distance is properly computed
assert block_processor.get_distance_to_tip(target_block) == target_distance
assert BlockProcessor.get_distance_to_tip(target_block) == target_distance
def test_is_block_in_best_chain():
best_block_hash = BlockProcessor.get_best_block_hash()
best_block = BlockProcessor.get_block(best_block_hash)
assert BlockProcessor.is_block_in_best_chain(best_block_hash)
fork(best_block.get("previousblockhash"))
generate_blocks(2)
assert not BlockProcessor.is_block_in_best_chain(best_block_hash)
def test_find_last_common_ancestor():
ancestor = BlockProcessor.get_best_block_hash()
generate_blocks(3)
best_block_hash = BlockProcessor.get_best_block_hash()
# Create a fork (forking creates a block when the mock runs in event mode)
fork(ancestor)
# Generate more blocks so the forked chain overtakes the old one and the best tip changes
generate_blocks(5)
# The last common ancestor between the old best and the new best should be the "ancestor"
last_common_ancestor, dropped_txs = BlockProcessor.find_last_common_ancestor(best_block_hash)
assert last_common_ancestor == ancestor
assert len(dropped_txs) == 3

View File

@@ -1,8 +1,8 @@
import pytest
from pisa.carrier import Carrier
from test.simulator.utils import sha256d
from test.simulator.transaction import TX
from bitcoind_mock.utils import sha256d
from bitcoind_mock.transaction import TX
from test.pisa.unit.conftest import generate_blocks, get_random_value_hex
from pisa.rpc_errors import RPC_VERIFY_ALREADY_IN_CHAIN, RPC_DESERIALIZATION_ERROR

View File

@@ -15,8 +15,8 @@ from pisa.tools import bitcoin_cli
from common.constants import LOCATOR_LEN_HEX
from common.tools import check_sha256_hex_format
from test.simulator.utils import sha256d
from test.simulator.bitcoind_sim import TX
from bitcoind_mock.utils import sha256d
from bitcoind_mock.transaction import TX
from test.pisa.unit.conftest import generate_block, generate_blocks, get_random_value_hex

View File

@@ -1,306 +0,0 @@
import os
import time
import json
import logging
import binascii
from itertools import islice
from threading import Thread, Event
from flask import Flask, request, Response, abort
from pisa.rpc_errors import *
from test.simulator.utils import sha256d
from test.simulator.transaction import TX
from test.simulator.zmq_publisher import ZMQPublisher
from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT
app = Flask(__name__)
HOST = "localhost"
PORT = "18443"
blockchain = []
blocks = {}
mined_transactions = {}
mempool = {}
mine_new_block = Event()
TIME_BETWEEN_BLOCKS = 5
GENESIS_PARENT = "0000000000000000000000000000000000000000000000000000000000000000"
prev_block_hash = GENESIS_PARENT
@app.route("/generate", methods=["POST"])
def generate():
global mine_new_block
mine_new_block.set()
return Response(status=200, mimetype="application/json")
@app.route("/fork", methods=["POST"])
def create_fork():
"""
create_fork processes chain fork requests. It will create a fork with the following parameters:
parent: the block hash from which the chain will be forked.
length: the length of the fork to be created (number of blocks to be mined on top of parent).
stay: whether to stay on the forked chain after length blocks have been mined or to come back to the previous
chain. stay is optional and defaults to False.
"""
global prev_block_hash
request_data = request.get_json()
response = {"result": 0, "error": None}
parent = request_data.get("parent")
# FIXME: We only accept forks one by one for now
if parent not in blocks:
response["error"] = {"code": -1, "message": "Wrong parent block to fork from"}
else:
prev_block_hash = parent
print("Forking chain from {}".format(parent))
# FIXME: the blockchain is defined as a list (since forks in the sim were not possible until recently). Therefore
#        block heights and the blockchain length are currently incorrect. It does the trick to test forks, but
#        should be fixed for better testing.
return Response(json.dumps(response), status=200, mimetype="application/json")
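For reference, forking the simulated chain is just a JSON POST against this endpoint; only parent is honoured by the code above (length and stay are described in the docstring but not implemented yet), and the new conftest helper in this PR does the same against the external bitcoind_mock. A minimal sketch using the HOST/PORT defaults defined at the top of this file; the helper name is hypothetical:
import requests

def fork_from(parent_block_hash, host="localhost", port="18443"):
    # Ask the simulator to re-parent mining onto parent_block_hash; it replies with
    # {"result": 0, "error": None} on success or an error dict for an unknown parent.
    r = requests.post("http://{}:{}/fork".format(host, port), json={"parent": parent_block_hash}, timeout=5)
    return r.json()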
@app.route("/", methods=["POST"])
def process_request():
"""
process_request simulates the bitcoin-rpc server run by bitcoind. The available commands are limited to the ones
we'll need to use in pisa. The model we will be using is quite simplified to reduce the complexity of simulating
bitcoind:
Raw transactions: raw transactions will actually be transaction ids (txids). Pisa will, therefore, receive
encrypted blobs that encrypt ids instead of real transactions.
decoderawtransaction: querying for the decoding of a raw transaction will return a dictionary with a single
field: "txid", which will match the txid provided in the request.
sendrawtransaction: sending a raw transaction will notify our mining simulator to include such a transaction
in a subsequent block.
getrawtransaction: requesting a raw transaction from a txid will return a dictionary containing a single field:
"confirmations", since raw transactions are only queried to check whether a transaction has
made it into a block or not.
getblockcount: the block count is obtained from the mining simulator by querying how many blocks have been
emitted so far.
getblock: querying for a block will return a dictionary with three fields: "tx", representing a list
of transactions, "height", representing the block height, and "hash", representing the block
hash. All of them are obtained from the mining simulator.
getblockhash: a block hash is only queried by pisad on bootstrapping to check the network bitcoind is
running on.
getbestblockhash: returns the hash of the block at the tip of the chain.
help: help is only used as a sample command to test whether bitcoind is running when bootstrapping
pisad. It will return a 200/OK with no data.
"""
global mempool
request_data = request.get_json()
method = request_data.get("method")
response = {"id": 0, "result": 0, "error": None}
no_param_err = {"code": RPC_MISC_ERROR, "message": "JSON value is not a {} as expected"}
if method == "decoderawtransaction":
rawtx = get_param(request_data)
if isinstance(rawtx, str) and len(rawtx) % 2 == 0:
txid = sha256d(rawtx)
if TX.deserialize(rawtx) is not None:
response["result"] = {"txid": txid}
else:
response["error"] = {"code": RPC_DESERIALIZATION_ERROR, "message": "TX decode failed"}
else:
response["error"] = no_param_err
response["error"]["message"] = response["error"]["message"].format("string")
elif method == "sendrawtransaction":
# TODO: A way of rejecting transactions should be added to test edge cases.
rawtx = get_param(request_data)
if isinstance(rawtx, str) and len(rawtx) % 2 == 0:
txid = sha256d(rawtx)
if TX.deserialize(rawtx) is not None:
if txid not in list(mined_transactions.keys()):
mempool[txid] = rawtx
response["result"] = {"txid": txid}
else:
response["error"] = {
"code": RPC_VERIFY_ALREADY_IN_CHAIN,
"message": "Transaction already in block chain",
}
else:
response["error"] = {"code": RPC_DESERIALIZATION_ERROR, "message": "TX decode failed"}
else:
response["error"] = no_param_err
response["error"]["message"] = response["error"]["message"].format("string")
elif method == "getrawtransaction":
txid = get_param(request_data)
if isinstance(txid, str):
if txid in mined_transactions:
block = blocks.get(mined_transactions[txid]["block"])
rawtx = mined_transactions[txid].get("tx")
response["result"] = {"hex": rawtx, "confirmations": len(blockchain) - block.get("height")}
elif txid in mempool:
response["result"] = {"confirmations": None}
else:
response["error"] = {
"code": RPC_INVALID_ADDRESS_OR_KEY,
"message": "No such mempool or blockchain transaction. Use gettransaction for "
"wallet transactions.",
}
else:
response["error"] = no_param_err
response["error"]["message"] = response["error"]["message"].format("string")
elif method == "getblockcount":
response["result"] = len(blockchain)
elif method == "getblock":
blockid = get_param(request_data)
if isinstance(blockid, str):
block = blocks.get(blockid)
if block is not None:
block["hash"] = blockid
# FIXME: the confirmation counter depends on the chain the transaction is in (in case of forks). For
#        now there will be only one, but multiple forks would come in handy to test edge cases
block["confirmations"] = len(blockchain) - block["height"] + 1
response["result"] = block
else:
response["error"] = {"code": RPC_INVALID_ADDRESS_OR_KEY, "message": "Block not found"}
else:
response["error"] = no_param_err
response["error"]["message"] = response["error"]["message"].format("string")
elif method == "getblockhash":
height = get_param(request_data)
if isinstance(height, int):
if 0 <= height <= len(blockchain):
response["result"] = blockchain[height]
else:
response["error"] = {"code": RPC_INVALID_PARAMETER, "message": "Block height out of range"}
else:
response["error"] = no_param_err
response["error"]["message"] = response["error"]["message"].format("integer")
elif method == "getbestblockhash":
response["result"] = blockchain[-1]
elif method == "help":
pass
else:
return abort(404, "Method not found")
return Response(json.dumps(response), status=200, mimetype="application/json")
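To make the simplified model above concrete: requests are JSON-RPC style POSTs against the root endpoint, with the first (and only) positional parameter read by the get_param helper below. A hedged sketch, assuming the simulator (or the bitcoind_mock that replaces it in this PR) is listening on the default HOST/PORT; the mock_rpc helper is hypothetical:
import requests

def mock_rpc(method, params, host="localhost", port="18443"):
    # Mirror the request shape process_request expects: "method" plus a "params" list.
    payload = {"method": method, "params": params, "id": 0}
    return requests.post("http://{}:{}/".format(host, port), json=payload, timeout=5).json()

count = mock_rpc("getblockcount", [])["result"]     # number of blocks emitted so far
tip = mock_rpc("getbestblockhash", [])["result"]    # hash at the tip of the chain
block = mock_rpc("getblock", [tip])["result"]       # exposes "tx", "height", "hash", "confirmations"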
def get_param(request_data):
param = None
params = request_data.get("params")
if isinstance(params, list) and len(params) > 0:
param = params[0]
return param
def load_data():
pass
def simulate_mining(mode, time_between_blocks, verbose=True):
global mempool, mined_transactions, blocks, blockchain, mine_new_block, prev_block_hash
mining_simulator = ZMQPublisher(
topic=b"hashblock", feed_protocol=FEED_PROTOCOL, feed_addr=FEED_ADDR, feed_port=FEED_PORT
)
# Set the mining event to initialize the blockchain with a block
mine_new_block.set()
while mine_new_block.wait():
block_hash = os.urandom(32).hex()
coinbase_tx = TX.create_dummy_transaction()
coinbase_tx_hash = sha256d(coinbase_tx)
txs_to_mine = dict({coinbase_tx_hash: coinbase_tx})
if len(mempool) != 0:
# We'll mine up to 100 txs per block
for txid, rawtx in dict(islice(mempool.items(), 99)).items():
txs_to_mine[txid] = rawtx
mempool.pop(txid)
# Keep track of the mined transaction (to respond to getrawtransaction)
for txid, tx in txs_to_mine.items():
mined_transactions[txid] = {"tx": tx, "block": block_hash}
# FIXME: chain_work is being defined as an incremental counter for now. Multiple chains should be possible.
blocks[block_hash] = {
"tx": list(txs_to_mine.keys()),
"height": len(blockchain),
"previousblockhash": prev_block_hash,
"chainwork": "{:x}".format(len(blockchain)),
}
mining_simulator.publish_data(binascii.unhexlify(block_hash))
blockchain.append(block_hash)
prev_block_hash = block_hash
if verbose:
print("New block mined: {}".format(block_hash))
print("\tTransactions: {}".format(list(txs_to_mine.keys())))
if mode == "time":
time.sleep(time_between_blocks)
else:
mine_new_block.clear()
def run_simulator(mode="time", time_between_blocks=TIME_BETWEEN_BLOCKS, verbose=True):
if mode not in ["time", "event"]:
raise ValueError("Node must be time or event")
mining_thread = Thread(target=simulate_mining, args=[mode, time_between_blocks, verbose])
mining_thread.start()
# Setting Flask log to ERROR only so it does not mess with our logging. Also disabling Flask's initial messages
logging.getLogger("werkzeug").setLevel(logging.ERROR)
os.environ["WERKZEUG_RUN_MAIN"] = "true"
app.run(host=HOST, port=PORT)

View File

@@ -1,143 +0,0 @@
import re
import pytest
from time import sleep
from threading import Thread
from test.simulator.transaction import TX
from test.pisa.unit import get_random_value_hex
from test.simulator.bitcoind_sim import run_simulator
from pisa.utils.auth_proxy import AuthServiceProxy, JSONRPCException
from pisa.conf import BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT
MIXED_VALUES = values = [-1, 500, "", "111", [], 1.1, None, "", "a" * 31, "b" * 33, get_random_value_hex(32)]
bitcoin_cli = AuthServiceProxy("http://%s:%s@%s:%d" % (BTC_RPC_USER, BTC_RPC_PASSWD, BTC_RPC_HOST, BTC_RPC_PORT))
@pytest.fixture(scope="module")
def run_bitcoind():
bitcoind_thread = Thread(target=run_simulator, kwargs={"mode": "event"})
bitcoind_thread.daemon = True
bitcoind_thread.start()
# It takes a little bit of time to start the API (otherwise the requests are sent too early and they fail)
sleep(0.1)
@pytest.fixture(scope="module")
def genesis_block_hash(run_bitcoind):
return bitcoin_cli.getblockhash(0)
def check_hash_format(txid):
# TODO: #12-check-txid-regexp
return isinstance(txid, str) and re.search(r"^[0-9A-Fa-f]{64}$", txid) is not None
def test_help(run_bitcoind):
# Help should always return 0
assert bitcoin_cli.help() == 0
# FIXME: Better assert for the exceptions would be nice (check the returned errno is the expected one)
def test_getblockhash(genesis_block_hash):
# First block
assert check_hash_format(genesis_block_hash)
# Check that the values are within range and of the proper format (all should fail)
for v in MIXED_VALUES:
try:
bitcoin_cli.getblockhash(v)
assert False
except JSONRPCException as e:
assert True
def test_get_block(genesis_block_hash):
# getblock should return a list of transactions and the height
block = bitcoin_cli.getblock(genesis_block_hash)
assert isinstance(block.get("tx"), list)
assert len(block.get("tx")) != 0
assert isinstance(block.get("height"), int)
# It should fail for wrong data formats and random ids
for v in MIXED_VALUES:
try:
bitcoin_cli.getblock(v)
assert False
except JSONRPCException as e:
assert True
def test_decoderawtransaction(genesis_block_hash):
# decoderawtransaction should only return if the given transaction matches a txid format
block = bitcoin_cli.getblock(genesis_block_hash)
coinbase_txid = block.get("tx")[0]
coinbase_tx = bitcoin_cli.getrawtransaction(coinbase_txid).get("hex")
tx = bitcoin_cli.decoderawtransaction(coinbase_tx)
assert isinstance(tx, dict)
assert isinstance(tx.get("txid"), str)
assert check_hash_format(tx.get("txid"))
# Therefore it should also work for a random transaction hex in our simulation
random_tx = TX.create_dummy_transaction()
tx = bitcoin_cli.decoderawtransaction(random_tx)
assert isinstance(tx, dict)
assert isinstance(tx.get("txid"), str)
assert check_hash_format(tx.get("txid"))
# But it should fail for improperly formatted ones
for v in MIXED_VALUES:
try:
bitcoin_cli.decoderawtransaction(v)
assert False
except JSONRPCException as e:
assert True
def test_sendrawtransaction(genesis_block_hash):
# sendrawtransaction should only allow txids that the simulator has not mined yet
bitcoin_cli.sendrawtransaction(TX.create_dummy_transaction())
# Any data not matching the txid format or that matches with an already mined transaction should fail
try:
genesis_tx = bitcoin_cli.getblock(genesis_block_hash).get("tx")[0]
bitcoin_cli.sendrawtransaction(genesis_tx)
assert False
except JSONRPCException as e:
assert True
for v in MIXED_VALUES:
try:
bitcoin_cli.sendrawtransaction(v)
assert False
except JSONRPCException as e:
assert True
def test_getrawtransaction(genesis_block_hash):
# getrawtransaction should work for existing transactions, and fail for non-existing ones
genesis_tx = bitcoin_cli.getblock(genesis_block_hash).get("tx")[0]
tx = bitcoin_cli.getrawtransaction(genesis_tx)
assert isinstance(tx, dict)
assert isinstance(tx.get("confirmations"), int)
for v in MIXED_VALUES:
try:
bitcoin_cli.getrawtransaction(v)
assert False
except JSONRPCException as e:
assert True
def test_getblockcount():
# getblockcount should always return a positive integer
bc = bitcoin_cli.getblockcount()
assert isinstance(bc, int)
assert bc >= 0

View File

@@ -1,152 +0,0 @@
# Porting some functionality from https://github.com/sr-gi/bitcoin_tools with some modifications <3
from os import urandom
from test.simulator.utils import *
class TX:
""" Defines a class TX (transaction) that holds all the modifiable fields of a Bitcoin transaction, such as
version, number of inputs, reference to previous transactions, input and output scripts, value, etc.
"""
def __init__(self):
self.version = None
self.inputs = None
self.outputs = None
self.nLockTime = None
self.prev_tx_id = []
self.prev_out_index = []
self.scriptSig = []
self.scriptSig_len = []
self.nSequence = []
self.value = []
self.scriptPubKey = []
self.scriptPubKey_len = []
self.offset = 0
self.hex = ""
@classmethod
def deserialize(cls, hex_tx):
""" Builds a transaction object from the hexadecimal serialization format of a transaction that
could be obtained, for example, from a block explorer.
:param hex_tx: Hexadecimal serialized transaction.
:type hex_tx: hex str
:return: The transaction built from the provided hex-serialized transaction.
:rtype: TX
"""
tx = cls()
tx.hex = hex_tx
try:
tx.version = int(change_endianness(parse_element(tx, 4)), 16)
# INPUTS
tx.inputs = int(parse_varint(tx), 16)
for i in range(tx.inputs):
tx.prev_tx_id.append(change_endianness(parse_element(tx, 32)))
tx.prev_out_index.append(int(change_endianness(parse_element(tx, 4)), 16))
# ScriptSig
tx.scriptSig_len.append(int(parse_varint(tx), 16))
tx.scriptSig.append(parse_element(tx, tx.scriptSig_len[i]))
tx.nSequence.append(int(parse_element(tx, 4), 16))
# OUTPUTS
tx.outputs = int(parse_varint(tx), 16)
for i in range(tx.outputs):
tx.value.append(int(change_endianness(parse_element(tx, 8)), 16))
# ScriptPubKey
tx.scriptPubKey_len.append(int(parse_varint(tx), 16))
tx.scriptPubKey.append(parse_element(tx, tx.scriptPubKey_len[i]))
tx.nLockTime = int(parse_element(tx, 4), 16)
if tx.offset != len(tx.hex):
# There is some error in the serialized transaction passed as input. Transaction can't be built
tx = None
else:
tx.offset = 0
except ValueError:
# If a parsing error occurs, the deserialization stops and None is returned
tx = None
return tx
def serialize(self, rtype=hex):
""" Serialize all the transaction fields arranged in the proper order, resulting in a hexadecimal string
ready to be broadcast to the network.
:param self: self
:type self: TX
:param rtype: Whether the serialized transaction is returned as a hex str or a byte array.
:type rtype: hex or bin
:return: Serialized transaction representation (hexadecimal or bin depending on rtype parameter).
:rtype: hex str / bin
"""
if rtype not in [hex, bin]:
raise Exception("Invalid return type (rtype). It should be either hex or bin.")
serialized_tx = change_endianness(int2bytes(self.version, 4)) # 4-byte version number (LE).
# INPUTS
serialized_tx += encode_varint(self.inputs) # Varint number of inputs.
for i in range(self.inputs):
serialized_tx += change_endianness(self.prev_tx_id[i]) # 32-byte hash of the previous transaction (LE).
serialized_tx += change_endianness(int2bytes(self.prev_out_index[i], 4)) # 4-byte output index (LE)
serialized_tx += encode_varint(len(self.scriptSig[i]) // 2) # Varint input script length.
# ScriptSig
serialized_tx += self.scriptSig[i] # Input script.
serialized_tx += int2bytes(self.nSequence[i], 4) # 4-byte sequence number.
# OUTPUTS
serialized_tx += encode_varint(self.outputs) # Varint number of outputs.
if self.outputs != 0:
for i in range(self.outputs):
serialized_tx += change_endianness(int2bytes(self.value[i], 8)) # 8-byte field Satoshi value (LE)
# ScriptPubKey
serialized_tx += encode_varint(len(self.scriptPubKey[i]) // 2) # Varint Output script length.
serialized_tx += self.scriptPubKey[i] # Output script.
serialized_tx += int2bytes(self.nLockTime, 4) # 4-byte lock time field
# If return type has been set to binary, the serialized transaction is converted.
if rtype is bin:
serialized_tx = unhexlify(serialized_tx)
return serialized_tx
@staticmethod
def create_dummy_transaction(prev_tx_id=None, prev_out_index=None):
tx = TX()
if prev_tx_id is None:
prev_tx_id = urandom(32).hex()
if prev_out_index is None:
prev_out_index = 0
tx.version = 1
tx.inputs = 1
tx.outputs = 1
tx.prev_tx_id = [prev_tx_id]
tx.prev_out_index = [prev_out_index]
tx.nLockTime = 0
tx.scriptSig = [
"47304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860"
"a4acdd12909d831cc56cbbac4622082221a8768d1d0901"
]
tx.scriptSig_len = [77]
tx.nSequence = [4294967295]
tx.value = [5000000000]
tx.scriptPubKey = [
"4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d71302fa28414e7aab37397f554a7df5f142c21c"
"1b7303b8a0626f1baded5c72a704f7e6cd84cac"
]
tx.scriptPubKey_len = [67]
return tx.serialize()
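Although this in-tree copy is removed by the PR, the class is still exercised through the external bitcoind_mock package, which is assumed to keep the same interface as the code above. A hedged round-trip sketch under that assumption (sha256d is what the simulator uses to derive txids):
from bitcoind_mock.transaction import TX
from bitcoind_mock.utils import sha256d

raw_tx = TX.create_dummy_transaction()   # hex-serialized one-input/one-output transaction
txid = sha256d(raw_tx)                   # txid as computed by the simulator

tx = TX.deserialize(raw_tx)              # returns None for a malformed hex string
assert tx is not None
assert tx.serialize() == raw_tx          # serialize() reverses deserialize()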

View File

@@ -1,133 +0,0 @@
# Porting some functionality from https://github.com/sr-gi/bitcoin_tools with some modifications <3
from hashlib import sha256
from binascii import unhexlify, hexlify
def change_endianness(x):
""" Changes the endianness (from BE to LE and vice versa) of a given value.
:param x: Given value whose endianness will be changed.
:type x: hex str
:return: The opposite endianness representation of the given value.
:rtype: hex str
"""
# If there is an odd number of elements, we make it even by adding a 0
if (len(x) % 2) == 1:
x += "0"
y = unhexlify(x)
z = y[::-1]
return hexlify(z).decode("utf-8")
def parse_varint(tx):
""" Parses a given transaction for extracting an encoded varint element.
:param tx: Transaction where the element will be extracted.
:type tx: TX
:return: The extracted varint element (prefix included), in hex format.
:rtype: hex str
"""
# First of all, the offset of the hex transaction is moved to the proper position (i.e. where the varint should be
# located) and the length and format of the data to be analyzed are checked.
data = tx.hex[tx.offset :]
if len(data) > 0:
size = int(data[:2], 16)
else:
raise ValueError("No data to be parsed")
if size > 255:
raise ValueError("Wrong value (varint size > 255)")
# Then, the storage length is derived from the varint prefix (no prefix, 0xFD, 0xFE or 0xFF).
if size <= 252: # No prefix
storage_length = 1
elif size == 253: # 0xFD
storage_length = 3
elif size == 254: # 0xFE
storage_length = 5
elif size == 255: # 0xFF
storage_length = 9
else:
raise Exception("Wrong input data size")
# Finally, the storage length is used to extract the proper number of bytes from the transaction hex and the
# transaction offset is updated.
varint = data[: storage_length * 2]
tx.offset += storage_length * 2
return varint
def parse_element(tx, size):
""" Parses a given transaction to extract an element of a given size.
:param tx: Transaction where the element will be extracted.
:type tx: TX
:param size: Size of the parameter to be extracted.
:type size: int
:return: The extracted element.
:rtype: hex str
"""
element = tx.hex[tx.offset : tx.offset + size * 2]
tx.offset += size * 2
return element
def encode_varint(value):
""" Encodes a given integer value to a varint. It only used the four varint representation cases used by bitcoin:
1-byte, 2-byte, 4-byte or 8-byte integers.
:param value: The integer value that will be encoded into varint.
:type value: int
:return: The varint representation of the given integer value.
:rtype: str
"""
# The value is checked in order to choose the size of its final representation.
# 0xFD(253), 0xFE(254) and 0xFF(255) are special cases, since they are the prefixes defined for 2-byte, 4-byte
# and 8-byte long values respectively.
if value < pow(2, 8) - 3:
size = 1
varint = int2bytes(value, size) # No prefix
else:
if value < pow(2, 16):
size = 2
prefix = 253 # 0xFD
elif value < pow(2, 32):
size = 4
prefix = 254 # 0xFE
elif value < pow(2, 64):
size = 8
prefix = 255 # 0xFF
else:
raise Exception("Wrong input data size")
varint = format(prefix, "x") + change_endianness(int2bytes(value, size))
return varint
def int2bytes(a, b):
""" Converts a given integer value (a) its b-byte representation, in hex format.
:param a: Value to be converted.
:type a: int
:param b: Byte size to be filled.
:type b: int
:return: The b-bytes representation of the given value (a) in hex format.
:rtype: hex str
"""
m = pow(2, 8 * b) - 1
if a > m:
raise Exception(
str(a) + " is too big to be represented with " + str(b) + " bytes. Maximum value is " + str(m) + "."
)
return ("%0" + str(2 * b) + "x") % a
def sha256d(hex_data):
data = unhexlify(hex_data)
double_sha256 = sha256(sha256(data).digest()).hexdigest()
return change_endianness(double_sha256)
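A few worked values for these helpers, computed from the definitions above; the import assumes the ported bitcoind_mock.utils module keeps the same functions as this deleted copy:
from bitcoind_mock.utils import change_endianness, int2bytes, encode_varint

assert change_endianness("0123") == "2301"   # byte-wise reversal of the hex string
assert int2bytes(300, 2) == "012c"           # 300 as a 2-byte, zero-padded hex value
assert encode_varint(100) == "64"            # below 253: single byte, no prefix
assert encode_varint(300) == "fd2c01"        # 0xFD prefix + 2-byte little-endian value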

View File

@@ -1,12 +0,0 @@
import zmq
class ZMQPublisher:
def __init__(self, topic, feed_protocol, feed_addr, feed_port):
self.topic = topic
self.context = zmq.Context()
self.socket = self.context.socket(zmq.PUB)
self.socket.bind("%s://%s:%s" % (feed_protocol, feed_addr, feed_port))
def publish_data(self, data):
self.socket.send_multipart([self.topic, data])