Merge pull request #86 from sr-gi/improve_config

Improves config and init files
This commit is contained in:
Sergi Delgado Segura
2020-01-23 21:18:24 +01:00
committed by GitHub
28 changed files with 409 additions and 242 deletions

View File

@@ -60,8 +60,10 @@ jobs:
# Run unit tests # Run unit tests
- run: - run:
name: Create pisa config name: Creates config files
command: cp pisa/sample_conf.py pisa/conf.py command: |
cp pisa/sample_conf.py pisa/conf.py
cp apps/cli/sample_conf.py apps/cli/conf.py
- run: - run:
name: Run pisa unit tests name: Run pisa unit tests
@@ -87,9 +89,8 @@ jobs:
command: | command: |
. venv/bin/activate . venv/bin/activate
cp test/pisa/e2e/pisa-conf.py pisa/conf.py cp test/pisa/e2e/pisa-conf.py pisa/conf.py
cd apps/ python3 -m apps.generate_key -d ~/.pisa_btc/
python3 -m generate_key python3 -m apps.generate_key -n cli -d ~/.pisa_btc/
python3 -m generate_key -n cli
# Run E2E tests # Run E2E tests
@@ -97,7 +98,6 @@ jobs:
name: Run e2e tests name: Run e2e tests
command: | command: |
. venv/bin/activate . venv/bin/activate
python3 -m pisa.pisad &
pytest test/pisa/e2e/ pytest test/pisa/e2e/
# - store_artifacts: # - store_artifacts:

1
.gitignore vendored
View File

@@ -17,3 +17,4 @@ test.py
.coverage .coverage
htmlcov htmlcov
docs/ docs/
.pisa_btc

View File

@@ -1,33 +1,28 @@
import logging import os
import apps.cli.conf as conf
from common.tools import extend_paths, check_conf_fields, setup_logging, setup_data_folder
# PISA-SERVER LOG_PREFIX = "cli"
DEFAULT_PISA_API_SERVER = "btc.pisa.watch"
DEFAULT_PISA_API_PORT = 9814
# PISA-CLI # Load config fields
CLIENT_LOG_FILE = "pisa-cli.log" conf_fields = {
APPOINTMENTS_FOLDER_NAME = "appointments" "DEFAULT_PISA_API_SERVER": {"value": conf.DEFAULT_PISA_API_SERVER, "type": str},
"DEFAULT_PISA_API_PORT": {"value": conf.DEFAULT_PISA_API_PORT, "type": int},
"DATA_FOLDER": {"value": conf.DATA_FOLDER, "type": str},
"CLIENT_LOG_FILE": {"value": conf.CLIENT_LOG_FILE, "type": str, "path": True},
"APPOINTMENTS_FOLDER_NAME": {"value": conf.APPOINTMENTS_FOLDER_NAME, "type": str, "path": True},
"CLI_PUBLIC_KEY": {"value": conf.CLI_PUBLIC_KEY, "type": str, "path": True},
"CLI_PRIVATE_KEY": {"value": conf.CLI_PRIVATE_KEY, "type": str, "path": True},
"PISA_PUBLIC_KEY": {"value": conf.PISA_PUBLIC_KEY, "type": str, "path": True},
}
CLI_PUBLIC_KEY = "cli_pk.der" # Expand user (~) if found and check fields are correct
CLI_PRIVATE_KEY = "cli_sk.der" conf_fields["DATA_FOLDER"]["value"] = os.path.expanduser(conf_fields["DATA_FOLDER"]["value"])
PISA_PUBLIC_KEY = "pisa_pk.der" # Extend relative paths
conf_fields = extend_paths(conf_fields["DATA_FOLDER"]["value"], conf_fields)
# Create the file logger # Sanity check fields and build config dictionary
f_logger = logging.getLogger("cli_file_log") config = check_conf_fields(conf_fields)
f_logger.setLevel(logging.INFO)
fh = logging.FileHandler(CLIENT_LOG_FILE) setup_data_folder(config.get("DATA_FOLDER"))
fh.setLevel(logging.INFO) setup_logging(config.get("CLIENT_LOG_FILE"), LOG_PREFIX)
fh_formatter = logging.Formatter("%(message)s")
fh.setFormatter(fh_formatter)
f_logger.addHandler(fh)
# Create the console logger
c_logger = logging.getLogger("cli_console_log")
c_logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch_formatter = logging.Formatter("%(asctime)s %(message)s.", "%Y-%m-%d %H:%M:%S")
ch.setFormatter(ch_formatter)
c_logger.addHandler(ch)

View File

@@ -9,16 +9,9 @@ from getopt import getopt, GetoptError
from requests import ConnectTimeout, ConnectionError from requests import ConnectTimeout, ConnectionError
from uuid import uuid4 from uuid import uuid4
from apps.cli import config, LOG_PREFIX
from apps.cli.help import help_add_appointment, help_get_appointment from apps.cli.help import help_add_appointment, help_get_appointment
from apps.cli.blob import Blob from apps.cli.blob import Blob
from apps.cli import (
DEFAULT_PISA_API_SERVER,
DEFAULT_PISA_API_PORT,
CLI_PUBLIC_KEY,
CLI_PRIVATE_KEY,
PISA_PUBLIC_KEY,
APPOINTMENTS_FOLDER_NAME,
)
from common.logger import Logger from common.logger import Logger
from common.appointment import Appointment from common.appointment import Appointment
@@ -27,7 +20,7 @@ from common.tools import check_sha256_hex_format, check_locator_format, compute_
HTTP_OK = 200 HTTP_OK = 200
logger = Logger("Client") logger = Logger(actor="Client", log_name_prefix=LOG_PREFIX)
# FIXME: TESTING ENDPOINT, WON'T BE THERE IN PRODUCTION # FIXME: TESTING ENDPOINT, WON'T BE THERE IN PRODUCTION
@@ -73,13 +66,13 @@ def load_key_file_data(file_name):
# Makes sure that the folder APPOINTMENTS_FOLDER_NAME exists, then saves the appointment and signature in it. # Makes sure that the folder APPOINTMENTS_FOLDER_NAME exists, then saves the appointment and signature in it.
def save_signed_appointment(appointment, signature): def save_signed_appointment(appointment, signature):
# Create the appointments directory if it doesn't already exist # Create the appointments directory if it doesn't already exist
os.makedirs(APPOINTMENTS_FOLDER_NAME, exist_ok=True) os.makedirs(config.get("APPOINTMENTS_FOLDER_NAME"), exist_ok=True)
timestamp = int(time.time()) timestamp = int(time.time())
locator = appointment["locator"] locator = appointment["locator"]
uuid = uuid4().hex # prevent filename collisions uuid = uuid4().hex # prevent filename collisions
filename = "{}/appointment-{}-{}-{}.json".format(APPOINTMENTS_FOLDER_NAME, timestamp, locator, uuid) filename = "{}/appointment-{}-{}-{}.json".format(config.get("APPOINTMENTS_FOLDER_NAME"), timestamp, locator, uuid)
data = {"appointment": appointment, "signature": signature} data = {"appointment": appointment, "signature": signature}
with open(filename, "w") as f: with open(filename, "w") as f:
@@ -233,7 +226,7 @@ def post_data_to_add_appointment_endpoint(data):
# Verify that the signature returned from the watchtower is valid. # Verify that the signature returned from the watchtower is valid.
def check_signature(signature, appointment): def check_signature(signature, appointment):
try: try:
pisa_pk_der = load_key_file_data(PISA_PUBLIC_KEY) pisa_pk_der = load_key_file_data(config.get("PISA_PUBLIC_KEY"))
pisa_pk = Cryptographer.load_public_key_der(pisa_pk_der) pisa_pk = Cryptographer.load_public_key_der(pisa_pk_der)
if pisa_pk is None: if pisa_pk is None:
@@ -287,7 +280,7 @@ def get_appointment(args):
def get_appointment_signature(appointment): def get_appointment_signature(appointment):
try: try:
sk_der = load_key_file_data(CLI_PRIVATE_KEY) sk_der = load_key_file_data(config.get("CLI_PRIVATE_KEY"))
cli_sk = Cryptographer.load_private_key_der(sk_der) cli_sk = Cryptographer.load_private_key_der(sk_der)
signature = Cryptographer.sign(appointment.serialize(), cli_sk) signature = Cryptographer.sign(appointment.serialize(), cli_sk)
@@ -309,7 +302,7 @@ def get_appointment_signature(appointment):
def get_pk(): def get_pk():
try: try:
cli_pk_der = load_key_file_data(CLI_PUBLIC_KEY) cli_pk_der = load_key_file_data(config.get("CLI_PUBLIC_KEY"))
hex_pk_der = binascii.hexlify(cli_pk_der) hex_pk_der = binascii.hexlify(cli_pk_der)
return hex_pk_der return hex_pk_der
@@ -345,8 +338,8 @@ def show_usage():
if __name__ == "__main__": if __name__ == "__main__":
pisa_api_server = DEFAULT_PISA_API_SERVER pisa_api_server = config.get("DEFAULT_PISA_API_SERVER")
pisa_api_port = DEFAULT_PISA_API_PORT pisa_api_port = config.get("DEFAULT_PISA_API_PORT")
commands = ["add_appointment", "get_appointment", "help"] commands = ["add_appointment", "get_appointment", "help"]
testing_commands = ["generate_dummy_appointment"] testing_commands = ["generate_dummy_appointment"]

13
apps/cli/sample_conf.py Normal file
View File

@@ -0,0 +1,13 @@
# Sample CLI configuration. CI copies this file to apps/cli/conf.py before
# running (see the circleci job above); apps/cli/__init__.py imports the copy.

# PISA-SERVER
DEFAULT_PISA_API_SERVER = "btc.pisa.watch"
DEFAULT_PISA_API_PORT = 9814

# PISA-CLI
# Base data directory; "~" is expanded at load time in apps/cli/__init__.py.
DATA_FOLDER = "~/.pisa_btc/"
# The entries below are flagged as paths in the config loader and are
# extended to be relative to DATA_FOLDER (via extend_paths).
CLIENT_LOG_FILE = "pisa-cli.log"
APPOINTMENTS_FOLDER_NAME = "appointment_receipts"
CLI_PUBLIC_KEY = "cli_pk.der"
CLI_PRIVATE_KEY = "cli_sk.der"
PISA_PUBLIC_KEY = "pisa_pk.der"

View File

@@ -30,14 +30,21 @@ def save_pk(pk, filename):
if __name__ == "__main__": if __name__ == "__main__":
name = "pisa" name = "pisa"
output_dir = "."
opts, _ = getopt(argv[1:], "n:", ["name"]) opts, _ = getopt(argv[1:], "n:d:", ["name", "dir"])
for opt, arg in opts: for opt, arg in opts:
if opt in ["-n", "--name"]: if opt in ["-n", "--name"]:
name = arg name = arg
SK_FILE_NAME = "../{}_sk.der".format(name) if opt in ["-d", "--dir"]:
PK_FILE_NAME = "../{}_pk.der".format(name) output_dir = arg
if output_dir.endswith("/"):
output_dir = output_dir[:-1]
SK_FILE_NAME = "{}/{}_sk.der".format(output_dir, name)
PK_FILE_NAME = "{}/{}_pk.der".format(output_dir, name)
if os.path.exists(SK_FILE_NAME): if os.path.exists(SK_FILE_NAME):
print('A key with name "{}" already exists. Aborting.'.format(SK_FILE_NAME)) print('A key with name "{}" already exists. Aborting.'.format(SK_FILE_NAME))

View File

@@ -1,8 +1,7 @@
import json import json
import logging
from datetime import datetime from datetime import datetime
from pisa import f_logger, c_logger
class _StructuredMessage: class _StructuredMessage:
def __init__(self, message, **kwargs): def __init__(self, message, **kwargs):
@@ -22,8 +21,10 @@ class Logger:
actor (:obj:`str`): the system actor that is logging the event (e.g. ``Watcher``, ``Cryptographer``, ...). actor (:obj:`str`): the system actor that is logging the event (e.g. ``Watcher``, ``Cryptographer``, ...).
""" """
def __init__(self, actor=None): def __init__(self, log_name_prefix, actor=None):
self.actor = actor self.actor = actor
self.f_logger = logging.getLogger("{}_file_log".format(log_name_prefix))
self.c_logger = logging.getLogger("{}_console_log".format(log_name_prefix))
def _add_prefix(self, msg): def _add_prefix(self, msg):
return msg if self.actor is None else "[{}]: {}".format(self.actor, msg) return msg if self.actor is None else "[{}]: {}".format(self.actor, msg)
@@ -54,8 +55,8 @@ class Logger:
kwargs: a ``key:value`` collection parameters to be added to the output. kwargs: a ``key:value`` collection parameters to be added to the output.
""" """
f_logger.info(self._create_file_message(msg, **kwargs)) self.f_logger.info(self._create_file_message(msg, **kwargs))
c_logger.info(self._create_console_message(msg, **kwargs)) self.c_logger.info(self._create_console_message(msg, **kwargs))
def debug(self, msg, **kwargs): def debug(self, msg, **kwargs):
""" """
@@ -66,8 +67,8 @@ class Logger:
kwargs: a ``key:value`` collection parameters to be added to the output. kwargs: a ``key:value`` collection parameters to be added to the output.
""" """
f_logger.debug(self._create_file_message(msg, **kwargs)) self.f_logger.debug(self._create_file_message(msg, **kwargs))
c_logger.debug(self._create_console_message(msg, **kwargs)) self.c_logger.debug(self._create_console_message(msg, **kwargs))
def error(self, msg, **kwargs): def error(self, msg, **kwargs):
""" """
@@ -78,8 +79,8 @@ class Logger:
kwargs: a ``key:value`` collection parameters to be added to the output. kwargs: a ``key:value`` collection parameters to be added to the output.
""" """
f_logger.error(self._create_file_message(msg, **kwargs)) self.f_logger.error(self._create_file_message(msg, **kwargs))
c_logger.error(self._create_console_message(msg, **kwargs)) self.c_logger.error(self._create_console_message(msg, **kwargs))
def warning(self, msg, **kwargs): def warning(self, msg, **kwargs):
""" """
@@ -90,5 +91,5 @@ class Logger:
kwargs: a ``key:value`` collection parameters to be added to the output. kwargs: a ``key:value`` collection parameters to be added to the output.
""" """
f_logger.warning(self._create_file_message(msg, **kwargs)) self.f_logger.warning(self._create_file_message(msg, **kwargs))
c_logger.warning(self._create_console_message(msg, **kwargs)) self.c_logger.warning(self._create_console_message(msg, **kwargs))

View File

@@ -1,4 +1,6 @@
import re import re
import os
import logging
from common.constants import LOCATOR_LEN_HEX from common.constants import LOCATOR_LEN_HEX
@@ -10,7 +12,7 @@ def check_sha256_hex_format(value):
value(:mod:`str`): the value to be checked. value(:mod:`str`): the value to be checked.
Returns: Returns:
:mod:`bool`: Whether or not the value matches the format. :obj:`bool`: Whether or not the value matches the format.
""" """
return isinstance(value, str) and re.match(r"^[0-9A-Fa-f]{64}$", value) is not None return isinstance(value, str) and re.match(r"^[0-9A-Fa-f]{64}$", value) is not None
@@ -23,7 +25,7 @@ def check_locator_format(value):
value(:mod:`str`): the value to be checked. value(:mod:`str`): the value to be checked.
Returns: Returns:
:mod:`bool`: Whether or not the value matches the format. :obj:`bool`: Whether or not the value matches the format.
""" """
return isinstance(value, str) and re.match(r"^[0-9A-Fa-f]{32}$", value) is not None return isinstance(value, str) and re.match(r"^[0-9A-Fa-f]{32}$", value) is not None
@@ -34,7 +36,113 @@ def compute_locator(tx_id):
Args: Args:
tx_id (:obj:`str`): the transaction id used to compute the locator. tx_id (:obj:`str`): the transaction id used to compute the locator.
Returns: Returns:
(:obj:`str`): The computed locator. :obj:`str`: The computed locator.
""" """
return tx_id[:LOCATOR_LEN_HEX] return tx_id[:LOCATOR_LEN_HEX]
def setup_data_folder(data_folder):
    """
    Creates a data folder for either the client or the server side if the folder does not exist.

    Args:
        data_folder (:obj:`str`): the path of the folder to create.
    """
    # makedirs with exist_ok=True is already a no-op when the folder exists,
    # so the prior isdir pre-check was redundant (and a TOCTOU race).
    os.makedirs(data_folder, exist_ok=True)
def check_conf_fields(conf_fields):
    """
    Checks that every provided configuration field has the expected type.

    Args:
        conf_fields (:obj:`dict`): a dictionary populated with the configuration file params and the
            expected types, formatted as ``{"field0": {"value": value_from_conf_file, "type": expected_type, ...}}``.

    Returns:
        :obj:`dict`: A flat dictionary mapping each field name to its value, provided every field
        passed the type check.

    Raises:
        ValueError: If any field is ``None`` or does not have the expected type.
    """
    checked = {}

    for name, spec in conf_fields.items():
        field_value = spec["value"]
        expected_type = spec["type"]

        # Reject both missing values and values of the wrong type.
        if field_value is None or not isinstance(field_value, expected_type):
            raise ValueError("{} variable in config is of the wrong type".format(name))

        checked[name] = field_value

    return checked
def extend_paths(base_path, config_fields):
    """
    Extends the relative paths of a given ``config_fields`` dictionary with a given ``base_path``.

    Paths in the config file are relative to DATA_FOLDER; this method extends them so they are all
    absolute.

    Args:
        base_path (:obj:`str`): the base path to prepend to the flagged paths.
        config_fields (:obj:`dict`): a dictionary of configuration fields containing a ``path`` flag,
            as follows: ``{"field0": {"value": value_from_conf_file, "path": True, ...}}``.

    Returns:
        :obj:`dict`: The ``config_fields`` dictionary with the flagged paths updated (the input
        dictionary is modified in place and also returned).
    """
    for key, field in config_fields.items():
        if field.get("path") is True:
            # os.path.join handles a base_path with or without a trailing
            # separator, unlike the previous plain string concatenation.
            config_fields[key]["value"] = os.path.join(base_path, field["value"])

    return config_fields
def setup_logging(log_file_path, log_name_prefix):
    """
    Sets up a pair of loggers (file and console) given a prefix and a file path.

    The logger names are ``<log_name_prefix>_file_log`` and ``<log_name_prefix>_console_log``.

    Args:
        log_file_path (:obj:`str`): the path of the file to output the file log.
        log_name_prefix (:obj:`str`): the prefix to identify the log.

    Raises:
        ValueError: If ``log_file_path`` or ``log_name_prefix`` is not a string.
    """
    if not isinstance(log_file_path, str):
        # Removed a stray debug print(log_file_path) that leaked to stdout here.
        raise ValueError("Wrong log file path.")

    if not isinstance(log_name_prefix, str):
        # Message fixed: this check validates the prefix, not a file name.
        raise ValueError("Wrong log name prefix.")

    # Create the file logger
    f_logger = logging.getLogger("{}_file_log".format(log_name_prefix))
    f_logger.setLevel(logging.INFO)

    # Guard against attaching duplicate handlers (and thus duplicate log
    # lines) if setup is invoked more than once for the same prefix.
    if not f_logger.handlers:
        fh = logging.FileHandler(log_file_path)
        fh.setLevel(logging.INFO)
        fh.setFormatter(logging.Formatter("%(message)s"))
        f_logger.addHandler(fh)

    # Create the console logger
    c_logger = logging.getLogger("{}_console_log".format(log_name_prefix))
    c_logger.setLevel(logging.INFO)

    if not c_logger.handlers:
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        ch.setFormatter(logging.Formatter("%(message)s.", "%Y-%m-%d %H:%M:%S"))
        c_logger.addHandler(ch)

View File

@@ -1,27 +1,38 @@
import logging import os
from pisa.utils.auth_proxy import AuthServiceProxy
import pisa.conf as conf import pisa.conf as conf
from common.tools import check_conf_fields, setup_logging, extend_paths, setup_data_folder
from pisa.utils.auth_proxy import AuthServiceProxy
HOST = "localhost" HOST = "localhost"
PORT = 9814 PORT = 9814
LOG_PREFIX = "pisa"
# Create the file logger # Load config fields
f_logger = logging.getLogger("pisa_file_log") conf_fields = {
f_logger.setLevel(logging.INFO) "BTC_RPC_USER": {"value": conf.BTC_RPC_USER, "type": str},
"BTC_RPC_PASSWD": {"value": conf.BTC_RPC_PASSWD, "type": str},
"BTC_RPC_HOST": {"value": conf.BTC_RPC_HOST, "type": str},
"BTC_RPC_PORT": {"value": conf.BTC_RPC_PORT, "type": int},
"BTC_NETWORK": {"value": conf.BTC_NETWORK, "type": str},
"FEED_PROTOCOL": {"value": conf.FEED_PROTOCOL, "type": str},
"FEED_ADDR": {"value": conf.FEED_ADDR, "type": str},
"FEED_PORT": {"value": conf.FEED_PORT, "type": int},
"DATA_FOLDER": {"value": conf.DATA_FOLDER, "type": str},
"MAX_APPOINTMENTS": {"value": conf.MAX_APPOINTMENTS, "type": int},
"EXPIRY_DELTA": {"value": conf.EXPIRY_DELTA, "type": int},
"MIN_TO_SELF_DELAY": {"value": conf.MIN_TO_SELF_DELAY, "type": int},
"SERVER_LOG_FILE": {"value": conf.SERVER_LOG_FILE, "type": str, "path": True},
"PISA_SECRET_KEY": {"value": conf.PISA_SECRET_KEY, "type": str, "path": True},
"DB_PATH": {"value": conf.DB_PATH, "type": str, "path": True},
}
fh = logging.FileHandler(conf.SERVER_LOG_FILE) # Expand user (~) if found and check fields are correct
fh.setLevel(logging.INFO) conf_fields["DATA_FOLDER"]["value"] = os.path.expanduser(conf_fields["DATA_FOLDER"]["value"])
fh_formatter = logging.Formatter("%(message)s") # Extend relative paths
fh.setFormatter(fh_formatter) conf_fields = extend_paths(conf_fields["DATA_FOLDER"]["value"], conf_fields)
f_logger.addHandler(fh)
# Create the console logger # Sanity check fields and build config dictionary
c_logger = logging.getLogger("pisa_console_log") config = check_conf_fields(conf_fields)
c_logger.setLevel(logging.INFO)
ch = logging.StreamHandler() setup_data_folder(config.get("DATA_FOLDER"))
ch.setLevel(logging.INFO) setup_logging(config.get("SERVER_LOG_FILE"), LOG_PREFIX)
ch_formatter = logging.Formatter("%(message)s.", "%Y-%m-%d %H:%M:%S")
ch.setFormatter(ch_formatter)
c_logger.addHandler(ch)

View File

@@ -1,8 +1,9 @@
import os import os
import json import json
import logging
from flask import Flask, request, abort, jsonify from flask import Flask, request, abort, jsonify
from pisa import HOST, PORT, logging from pisa import HOST, PORT, LOG_PREFIX
from common.logger import Logger from common.logger import Logger
from pisa.inspector import Inspector from pisa.inspector import Inspector
from common.appointment import Appointment from common.appointment import Appointment
@@ -13,7 +14,7 @@ from common.constants import HTTP_OK, HTTP_BAD_REQUEST, HTTP_SERVICE_UNAVAILABLE
# ToDo: #5-add-async-to-api # ToDo: #5-add-async-to-api
app = Flask(__name__) app = Flask(__name__)
logger = Logger("API") logger = Logger(actor="API", log_name_prefix=LOG_PREFIX)
class API: class API:

View File

@@ -1,8 +1,10 @@
from common.logger import Logger from common.logger import Logger
from pisa import LOG_PREFIX
from pisa.tools import bitcoin_cli from pisa.tools import bitcoin_cli
from pisa.utils.auth_proxy import JSONRPCException from pisa.utils.auth_proxy import JSONRPCException
logger = Logger("BlockProcessor") logger = Logger(actor="BlockProcessor", log_name_prefix=LOG_PREFIX)
class BlockProcessor: class BlockProcessor:

View File

@@ -1,10 +1,11 @@
from pisa import LOG_PREFIX
from pisa.rpc_errors import * from pisa.rpc_errors import *
from common.logger import Logger from common.logger import Logger
from pisa.tools import bitcoin_cli from pisa.tools import bitcoin_cli
from pisa.utils.auth_proxy import JSONRPCException from pisa.utils.auth_proxy import JSONRPCException
from pisa.errors import UNKNOWN_JSON_RPC_EXCEPTION, RPC_TX_REORGED_AFTER_BROADCAST from pisa.errors import UNKNOWN_JSON_RPC_EXCEPTION, RPC_TX_REORGED_AFTER_BROADCAST
logger = Logger("Carrier") logger = Logger(actor="Carrier", log_name_prefix=LOG_PREFIX)
# FIXME: This class is not fully covered by unit tests # FIXME: This class is not fully covered by unit tests

View File

@@ -2,11 +2,12 @@ import zmq
import binascii import binascii
from threading import Thread, Event, Condition from threading import Thread, Event, Condition
from pisa import LOG_PREFIX
from common.logger import Logger from common.logger import Logger
from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT, POLLING_DELTA, BLOCK_WINDOW_SIZE from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT, POLLING_DELTA, BLOCK_WINDOW_SIZE
from pisa.block_processor import BlockProcessor from pisa.block_processor import BlockProcessor
logger = Logger("ChainMonitor") logger = Logger(actor="ChainMonitor", log_name_prefix=LOG_PREFIX)
class ChainMonitor: class ChainMonitor:

View File

@@ -1,6 +1,8 @@
from pisa import LOG_PREFIX
from common.logger import Logger from common.logger import Logger
logger = Logger("Cleaner") logger = Logger(actor="Cleaner", log_name_prefix=LOG_PREFIX)
class Cleaner: class Cleaner:

View File

@@ -1,9 +1,11 @@
import json import json
import plyvel import plyvel
from pisa import LOG_PREFIX
from common.logger import Logger from common.logger import Logger
logger = Logger("DBManager") logger = Logger(actor="DBManager", log_name_prefix=LOG_PREFIX)
WATCHER_PREFIX = "w" WATCHER_PREFIX = "w"
WATCHER_LAST_BLOCK_KEY = "bw" WATCHER_LAST_BLOCK_KEY = "bw"

View File

@@ -4,12 +4,12 @@ from binascii import unhexlify
from common.constants import LOCATOR_LEN_HEX from common.constants import LOCATOR_LEN_HEX
from common.cryptographer import Cryptographer from common.cryptographer import Cryptographer
from pisa import errors from pisa import errors, LOG_PREFIX
from common.logger import Logger from common.logger import Logger
from common.appointment import Appointment from common.appointment import Appointment
from pisa.block_processor import BlockProcessor from pisa.block_processor import BlockProcessor
logger = Logger("Inspector") logger = Logger(actor="Inspector", log_name_prefix=LOG_PREFIX)
# FIXME: The inspector logs the wrong messages sent form the users. A possible attack surface would be to send a really # FIXME: The inspector logs the wrong messages sent form the users. A possible attack surface would be to send a really
# long field that, even if not accepted by PISA, would be stored in the logs. This is a possible DoS surface # long field that, even if not accepted by PISA, would be stored in the logs. This is a possible DoS surface

View File

@@ -3,16 +3,17 @@ from sys import argv, exit
from signal import signal, SIGINT, SIGQUIT, SIGTERM from signal import signal, SIGINT, SIGQUIT, SIGTERM
from common.logger import Logger from common.logger import Logger
from pisa import config, LOG_PREFIX
from pisa.api import API from pisa.api import API
from pisa.watcher import Watcher from pisa.watcher import Watcher
from pisa.builder import Builder from pisa.builder import Builder
import pisa.conf as conf
from pisa.db_manager import DBManager from pisa.db_manager import DBManager
from pisa.chain_monitor import ChainMonitor from pisa.chain_monitor import ChainMonitor
from pisa.block_processor import BlockProcessor from pisa.block_processor import BlockProcessor
from pisa.tools import can_connect_to_bitcoind, in_correct_network from pisa.tools import can_connect_to_bitcoind, in_correct_network
logger = Logger("Daemon") logger = Logger(actor="Daemon", log_name_prefix=LOG_PREFIX)
def handle_signals(signal_received, frame): def handle_signals(signal_received, frame):
@@ -24,72 +25,20 @@ def handle_signals(signal_received, frame):
exit(0) exit(0)
def load_config(config): def main():
""" global db_manager, chain_monitor
Looks through all of the config options to make sure they contain the right type of data and builds a config
dictionary.
Args:
config (:obj:`module`): It takes in a config module object.
Returns:
:obj:`dict` A dictionary containing the config values.
"""
conf_dict = {}
conf_fields = {
"BTC_RPC_USER": {"value": config.BTC_RPC_USER, "type": str},
"BTC_RPC_PASSWD": {"value": config.BTC_RPC_PASSWD, "type": str},
"BTC_RPC_HOST": {"value": config.BTC_RPC_HOST, "type": str},
"BTC_RPC_PORT": {"value": config.BTC_RPC_PORT, "type": int},
"BTC_NETWORK": {"value": config.BTC_NETWORK, "type": str},
"FEED_PROTOCOL": {"value": config.FEED_PROTOCOL, "type": str},
"FEED_ADDR": {"value": config.FEED_ADDR, "type": str},
"FEED_PORT": {"value": config.FEED_PORT, "type": int},
"MAX_APPOINTMENTS": {"value": config.MAX_APPOINTMENTS, "type": int},
"EXPIRY_DELTA": {"value": config.EXPIRY_DELTA, "type": int},
"MIN_TO_SELF_DELAY": {"value": config.MIN_TO_SELF_DELAY, "type": int},
"SERVER_LOG_FILE": {"value": config.SERVER_LOG_FILE, "type": str},
"PISA_SECRET_KEY": {"value": config.PISA_SECRET_KEY, "type": str},
"CLIENT_LOG_FILE": {"value": config.CLIENT_LOG_FILE, "type": str},
"TEST_LOG_FILE": {"value": config.TEST_LOG_FILE, "type": str},
"DB_PATH": {"value": config.DB_PATH, "type": str},
}
for field in conf_fields:
value = conf_fields[field]["value"]
correct_type = conf_fields[field]["type"]
if (value is not None) and isinstance(value, correct_type):
conf_dict[field] = value
else:
err_msg = "{} variable in config is of the wrong type".format(field)
logger.error(err_msg)
raise ValueError(err_msg)
return conf_dict
if __name__ == "__main__":
logger.info("Starting PISA")
signal(SIGINT, handle_signals) signal(SIGINT, handle_signals)
signal(SIGTERM, handle_signals) signal(SIGTERM, handle_signals)
signal(SIGQUIT, handle_signals) signal(SIGQUIT, handle_signals)
opts, _ = getopt(argv[1:], "", [""]) logger.info("Starting PISA")
for opt, arg in opts: db_manager = DBManager(config.get("DB_PATH"))
# FIXME: Leaving this here for future option/arguments
pass
pisa_config = load_config(conf)
db_manager = DBManager(pisa_config.get("DB_PATH"))
if not can_connect_to_bitcoind(): if not can_connect_to_bitcoind():
logger.error("Can't connect to bitcoind. Shutting down") logger.error("Can't connect to bitcoind. Shutting down")
elif not in_correct_network(pisa_config.get("BTC_NETWORK")): elif not in_correct_network(config.get("BTC_NETWORK")):
logger.error("bitcoind is running on a different network, check conf.py and bitcoin.conf. Shutting down") logger.error("bitcoind is running on a different network, check conf.py and bitcoin.conf. Shutting down")
else: else:
@@ -101,10 +50,10 @@ if __name__ == "__main__":
watcher_appointments_data = db_manager.load_watcher_appointments() watcher_appointments_data = db_manager.load_watcher_appointments()
responder_trackers_data = db_manager.load_responder_trackers() responder_trackers_data = db_manager.load_responder_trackers()
with open(pisa_config.get("PISA_SECRET_KEY"), "rb") as key_file: with open(config.get("PISA_SECRET_KEY"), "rb") as key_file:
secret_key_der = key_file.read() secret_key_der = key_file.read()
watcher = Watcher(db_manager, chain_monitor, secret_key_der, pisa_config) watcher = Watcher(db_manager, chain_monitor, secret_key_der, config)
chain_monitor.attach_watcher(watcher.block_queue, watcher.asleep) chain_monitor.attach_watcher(watcher.block_queue, watcher.asleep)
chain_monitor.attach_responder(watcher.responder.block_queue, watcher.responder.asleep) chain_monitor.attach_responder(watcher.responder.block_queue, watcher.responder.asleep)
@@ -150,8 +99,17 @@ if __name__ == "__main__":
watcher.block_queue = Builder.build_block_queue(missed_blocks_watcher) watcher.block_queue = Builder.build_block_queue(missed_blocks_watcher)
# Fire the API # Fire the API
API(watcher, config=pisa_config).start() API(watcher, config=config).start()
except Exception as e: except Exception as e:
logger.error("An error occurred: {}. Shutting down".format(e)) logger.error("An error occurred: {}. Shutting down".format(e))
exit(1) exit(1)
if __name__ == "__main__":
    # Parse command-line options. No options are currently supported; the
    # getopt call is kept as a placeholder for future arguments.
    opts, _ = getopt(argv[1:], "", [""])
    for opt, arg in opts:
        # FIXME: Leaving this here for future option/arguments
        pass

    # main() registers signal handlers and boots the daemon (see above).
    main()

View File

@@ -2,6 +2,7 @@ import json
from queue import Queue from queue import Queue
from threading import Thread from threading import Thread
from pisa import LOG_PREFIX
from common.logger import Logger from common.logger import Logger
from pisa.cleaner import Cleaner from pisa.cleaner import Cleaner
from pisa.carrier import Carrier from pisa.carrier import Carrier
@@ -10,7 +11,7 @@ from pisa.block_processor import BlockProcessor
CONFIRMATIONS_BEFORE_RETRY = 6 CONFIRMATIONS_BEFORE_RETRY = 6
MIN_CONFIRMATIONS = 6 MIN_CONFIRMATIONS = 6
logger = Logger("Responder") logger = Logger(actor="Responder", log_name_prefix=LOG_PREFIX)
class TransactionTracker: class TransactionTracker:

View File

@@ -5,27 +5,22 @@ BTC_RPC_HOST = "localhost"
BTC_RPC_PORT = 18443 BTC_RPC_PORT = 18443
BTC_NETWORK = "regtest" BTC_NETWORK = "regtest"
# CHAIN MONITOR
POLLING_DELTA = 60
BLOCK_WINDOW_SIZE = 10
# ZMQ # ZMQ
FEED_PROTOCOL = "tcp" FEED_PROTOCOL = "tcp"
FEED_ADDR = "127.0.0.1" FEED_ADDR = "127.0.0.1"
FEED_PORT = 28332 FEED_PORT = 28332
# PISA # PISA
DATA_FOLDER = "~/.pisa_btc/"
MAX_APPOINTMENTS = 100 MAX_APPOINTMENTS = 100
EXPIRY_DELTA = 6 EXPIRY_DELTA = 6
MIN_TO_SELF_DELAY = 20 MIN_TO_SELF_DELAY = 20
SERVER_LOG_FILE = "pisa.log" SERVER_LOG_FILE = "pisa.log"
PISA_SECRET_KEY = "pisa_sk.der" PISA_SECRET_KEY = "pisa_sk.der"
# PISA-CLI # CHAIN MONITOR
CLIENT_LOG_FILE = "pisa.log" POLLING_DELTA = 60
BLOCK_WINDOW_SIZE = 10
# TEST
TEST_LOG_FILE = "test.log"
# LEVELDB # LEVELDB
DB_PATH = "appointments" DB_PATH = "appointments"

View File

@@ -8,11 +8,12 @@ from common.tools import compute_locator
from common.logger import Logger from common.logger import Logger
from pisa import LOG_PREFIX
from pisa.cleaner import Cleaner from pisa.cleaner import Cleaner
from pisa.responder import Responder from pisa.responder import Responder
from pisa.block_processor import BlockProcessor from pisa.block_processor import BlockProcessor
logger = Logger("Watcher") logger = Logger(actor="Watcher", log_name_prefix=LOG_PREFIX)
class Watcher: class Watcher:

View File

@@ -154,12 +154,13 @@ def test_load_key_file_data():
def test_save_signed_appointment(monkeypatch): def test_save_signed_appointment(monkeypatch):
monkeypatch.setattr(pisa_cli, "APPOINTMENTS_FOLDER_NAME", "test_appointments") appointments_folder = "test_appointments_receipts"
pisa_cli.config["APPOINTMENTS_FOLDER_NAME"] = appointments_folder
pisa_cli.save_signed_appointment(dummy_appointment.to_dict(), get_dummy_signature()) pisa_cli.save_signed_appointment(dummy_appointment.to_dict(), get_dummy_signature())
# In folder "Appointments," grab all files and print them. # In folder "Appointments," grab all files and print them.
files = os.listdir("test_appointments") files = os.listdir(appointments_folder)
found = False found = False
for f in files: for f in files:
@@ -169,10 +170,10 @@ def test_save_signed_appointment(monkeypatch):
assert found assert found
# If "appointments" directory doesn't exist, function should create it. # If "appointments" directory doesn't exist, function should create it.
assert os.path.exists("test_appointments") assert os.path.exists(appointments_folder)
# Delete test directory once we're done. # Delete test directory once we're done.
shutil.rmtree("test_appointments") shutil.rmtree(appointments_folder)
def test_parse_add_appointment_args(): def test_parse_add_appointment_args():

View File

@@ -3,7 +3,6 @@ import struct
import binascii import binascii
from pytest import fixture from pytest import fixture
from pisa import c_logger
from common.appointment import Appointment from common.appointment import Appointment
from pisa.encrypted_blob import EncryptedBlob from pisa.encrypted_blob import EncryptedBlob
@@ -12,9 +11,6 @@ from test.pisa.unit.conftest import get_random_value_hex
from common.constants import LOCATOR_LEN_BYTES from common.constants import LOCATOR_LEN_BYTES
c_logger.disabled = True
# Not much to test here, adding it for completeness # Not much to test here, adding it for completeness
@fixture @fixture
def appointment_data(): def appointment_data():

View File

@@ -1,7 +1,26 @@
from common.tools import check_sha256_hex_format import os
import pytest
import logging
from copy import deepcopy
from pisa import conf_fields
from common.constants import LOCATOR_LEN_BYTES
from common.tools import (
check_sha256_hex_format,
check_locator_format,
compute_locator,
setup_data_folder,
check_conf_fields,
extend_paths,
setup_logging,
)
from test.common.unit.conftest import get_random_value_hex from test.common.unit.conftest import get_random_value_hex
conf_fields_copy = deepcopy(conf_fields)
def test_check_sha256_hex_format(): def test_check_sha256_hex_format():
# Only 32-byte hex encoded strings should pass the test # Only 32-byte hex encoded strings should pass the test
wrong_inputs = [None, str(), 213, 46.67, dict(), "A" * 63, "C" * 65, bytes(), get_random_value_hex(31)] wrong_inputs = [None, str(), 213, 46.67, dict(), "A" * 63, "C" * 65, bytes(), get_random_value_hex(31)]
@@ -10,3 +29,98 @@ def test_check_sha256_hex_format():
for v in range(100): for v in range(100):
assert check_sha256_hex_format(get_random_value_hex(32)) is True assert check_sha256_hex_format(get_random_value_hex(32)) is True
def test_check_locator_format():
    """Only hex strings of exactly LOCATOR_LEN_BYTES bytes must pass the check."""
    # Anything that is not a hex string of length 2 * LOCATOR_LEN_BYTES chars.
    wrong_inputs = [
        None,
        str(),
        213,
        46.67,
        dict(),
        "A" * (2 * LOCATOR_LEN_BYTES - 1),
        "C" * (2 * LOCATOR_LEN_BYTES + 1),
        bytes(),
        get_random_value_hex(LOCATOR_LEN_BYTES - 1),
    ]

    for wtype in wrong_inputs:
        # BUG FIX: the original asserted check_sha256_hex_format here, which never
        # exercised the function under test; locators must be validated with
        # check_locator_format.
        assert check_locator_format(wtype) is False

    # Properly sized random hex values must always be accepted.
    for _ in range(100):
        assert check_locator_format(get_random_value_hex(LOCATOR_LEN_BYTES)) is True
def test_compute_locator():
    """compute_locator output is validated through check_locator_format."""
    # Locators computed from full-length inputs must always be well formatted.
    for _ in range(100):
        assert check_locator_format(compute_locator(get_random_value_hex(LOCATOR_LEN_BYTES))) is True

    # Inputs shorter than LOCATOR_LEN_BYTES bytes cannot yield a valid locator.
    for size in range(1, LOCATOR_LEN_BYTES):
        assert check_locator_format(compute_locator(get_random_value_hex(size))) is False
def test_setup_data_folder():
    """setup_data_folder creates a missing folder; an existing one is left alone."""
    folder_name = "test_folder"

    # Precondition: the folder must not exist yet.
    assert os.path.isdir(folder_name) is False

    setup_data_folder(folder_name)
    assert os.path.isdir(folder_name) is True

    # Clean up after ourselves.
    os.rmdir(folder_name)
def test_check_conf_fields():
    """A valid conf_fields dict (built from a valid conf.py) should be accepted."""
    # Work on a private copy so this test does not depend on (or get broken by)
    # sibling tests that mutate the shared module-level conf_fields_copy.
    # isinstance is preferred over the original `type(x) == dict` comparison.
    assert isinstance(check_conf_fields(deepcopy(conf_fields)), dict)
def test_bad_check_conf_fields():
    """Wrongly-typed configuration values must make check_conf_fields raise."""
    # Use a private copy instead of mutating the shared conf_fields_copy, which
    # would pollute every test that runs after this one.
    bad_fields = deepcopy(conf_fields)

    # Mess up the types of mandatory fields (ints where strings are expected).
    bad_fields["BTC_RPC_USER"] = 0
    bad_fields["BTC_RPC_PASSWD"] = "password"
    bad_fields["BTC_RPC_HOST"] = 0

    # check_conf_fields is expected to reject the wrong types (a ValueError in
    # practice; Exception keeps the check tolerant of the exact subclass).
    with pytest.raises(Exception):
        check_conf_fields(bad_fields)
def test_extend_paths():
    """Only entries flagged with "path": True get the base path prepended."""
    fields = {
        "foo": {"value": "foofoo"},
        "var": {"value": "varvar", "path": True},
        "foovar": {"value": "foovarfoovar"},
    }
    prefix = "base_path/"

    extended = extend_paths(prefix, fields)

    for entry in extended.values():
        # Path entries must carry the prefix; the rest must be left untouched.
        is_path_entry = entry.get("path") is True
        assert (prefix in entry.get("value")) is is_path_entry
def test_setup_logging():
    """setup_logging must attach one file and one console handler per prefix."""
    prefix = "foo"
    log_file = "var.log"
    f_log_suffix = "_file_log"
    c_log_suffix = "_console_log"

    # No handlers should exist before setup. Use == rather than the original
    # `is 0`: identity comparison with int literals relies on CPython interning
    # and raises a SyntaxWarning on modern Python.
    assert len(logging.getLogger(prefix + f_log_suffix).handlers) == 0
    assert len(logging.getLogger(prefix + c_log_suffix).handlers) == 0

    setup_logging(log_file, prefix)

    # Exactly one handler per logger must have been created.
    assert len(logging.getLogger(prefix + f_log_suffix).handlers) == 1
    assert len(logging.getLogger(prefix + c_log_suffix).handlers) == 1

    # Remove the log file created by the file handler.
    os.remove(log_file)

View File

@@ -1,8 +1,10 @@
import pytest import pytest
import random import random
from multiprocessing import Process
from decimal import Decimal, getcontext from decimal import Decimal, getcontext
import pisa.conf as conf import pisa.conf as conf
from pisa.pisad import main
from pisa.utils.auth_proxy import AuthServiceProxy from pisa.utils.auth_proxy import AuthServiceProxy
getcontext().prec = 10 getcontext().prec = 10
@@ -48,6 +50,13 @@ def create_txs(bitcoin_cli):
return signed_commitment_tx, signed_penalty_tx return signed_commitment_tx, signed_penalty_tx
def run_pisad():
    # Launch the pisa daemon in a background daemonized process so the e2e
    # tests can talk to it; the caller owns the returned Process handle.
    daemon = Process(target=main, daemon=True)
    daemon.start()

    return daemon
def get_random_value_hex(nbytes): def get_random_value_hex(nbytes):
pseudo_random_value = random.getrandbits(8 * nbytes) pseudo_random_value = random.getrandbits(8 * nbytes)
prv_hex = "{:x}".format(pseudo_random_value) prv_hex = "{:x}".format(pseudo_random_value)

View File

@@ -5,27 +5,22 @@ BTC_RPC_HOST = "localhost"
BTC_RPC_PORT = 18445 BTC_RPC_PORT = 18445
BTC_NETWORK = "regtest" BTC_NETWORK = "regtest"
# CHAIN MONITOR
POLLING_DELTA = 60
BLOCK_WINDOW_SIZE = 10
# ZMQ # ZMQ
FEED_PROTOCOL = "tcp" FEED_PROTOCOL = "tcp"
FEED_ADDR = "127.0.0.1" FEED_ADDR = "127.0.0.1"
FEED_PORT = 28335 FEED_PORT = 28335
# PISA # PISA
DATA_FOLDER = "~/.pisa_btc/"
MAX_APPOINTMENTS = 100 MAX_APPOINTMENTS = 100
EXPIRY_DELTA = 6 EXPIRY_DELTA = 6
MIN_TO_SELF_DELAY = 20 MIN_TO_SELF_DELAY = 20
SERVER_LOG_FILE = "pisa.log" SERVER_LOG_FILE = "pisa.log"
PISA_SECRET_KEY = "pisa_sk.der" PISA_SECRET_KEY = "pisa_sk.der"
# PISA-CLI # CHAIN MONITOR
CLIENT_LOG_FILE = "pisa.log" POLLING_DELTA = 60
BLOCK_WINDOW_SIZE = 10
# TEST
TEST_LOG_FILE = "test.log"
# LEVELDB # LEVELDB
DB_PATH = "appointments" DB_PATH = "appointments"

View File

@@ -9,13 +9,22 @@ from common.tools import compute_locator
from common.appointment import Appointment from common.appointment import Appointment
from common.cryptographer import Cryptographer from common.cryptographer import Cryptographer
from pisa.utils.auth_proxy import JSONRPCException from pisa.utils.auth_proxy import JSONRPCException
from test.pisa.e2e.conftest import END_TIME_DELTA, build_appointment_data, get_random_value_hex, create_penalty_tx from test.pisa.e2e.conftest import (
END_TIME_DELTA,
build_appointment_data,
get_random_value_hex,
create_penalty_tx,
run_pisad,
)
# We'll use pisa_cli to add appointments. The expected input format is a list of arguments with a json-encoded # We'll use pisa_cli to add appointments. The expected input format is a list of arguments with a json-encoded
# appointment # appointment
pisa_cli.pisa_api_server = HOST pisa_cli.pisa_api_server = HOST
pisa_cli.pisa_api_port = PORT pisa_cli.pisa_api_port = PORT
# Run pisad
pisad_process = run_pisad()
def broadcast_transaction_and_mine_block(bitcoin_cli, commitment_tx, addr): def broadcast_transaction_and_mine_block(bitcoin_cli, commitment_tx, addr):
# Broadcast the commitment transaction and mine a block # Broadcast the commitment transaction and mine a block

View File

@@ -1,3 +1,4 @@
import os
import pytest import pytest
import random import random
import requests import requests
@@ -161,6 +162,7 @@ def generate_dummy_tracker():
def get_config(): def get_config():
data_folder = os.path.expanduser("~/.pisa_btc")
config = { config = {
"BTC_RPC_USER": "username", "BTC_RPC_USER": "username",
"BTC_RPC_PASSWD": "password", "BTC_RPC_PASSWD": "password",
@@ -170,13 +172,12 @@ def get_config():
"FEED_PROTOCOL": "tcp", "FEED_PROTOCOL": "tcp",
"FEED_ADDR": "127.0.0.1", "FEED_ADDR": "127.0.0.1",
"FEED_PORT": 28332, "FEED_PORT": 28332,
"DATA_FOLDER": data_folder,
"MAX_APPOINTMENTS": 100, "MAX_APPOINTMENTS": 100,
"EXPIRY_DELTA": 6, "EXPIRY_DELTA": 6,
"MIN_TO_SELF_DELAY": 20, "MIN_TO_SELF_DELAY": 20,
"SERVER_LOG_FILE": "pisa.log", "SERVER_LOG_FILE": data_folder + "pisa.log",
"PISA_SECRET_KEY": "pisa_sk.der", "PISA_SECRET_KEY": data_folder + "pisa_sk.der",
"CLIENT_LOG_FILE": "pisa.log",
"TEST_LOG_FILE": "test.log",
"DB_PATH": "appointments", "DB_PATH": "appointments",
} }

View File

@@ -1,51 +0,0 @@
import importlib
import os
import pytest
from shutil import copyfile
from pisa.pisad import load_config
test_conf_file_path = os.getcwd() + "/test/pisa/unit/test_conf.py"
def test_load_config():
    """A config module with all the expected fields should load into a dict."""
    # Use a copy of sample_conf.py as a disposable test config file.
    copyfile(os.getcwd() + "/pisa/sample_conf.py", test_conf_file_path)

    try:
        import test.pisa.unit.test_conf as conf

        # A well-formed config module must be turned into a plain dict.
        # isinstance is preferred over the original `type(x) == dict`.
        conf_dict = load_config(conf)
        assert isinstance(conf_dict, dict)

    finally:
        # Remove the temporary file even if the assertions above fail, so later
        # tests (and reruns) start from a clean state.
        os.remove(test_conf_file_path)
def test_bad_load_config():
    """A config module with wrongly-typed fields must make load_config raise."""
    # Write a deliberately broken config file (ints where strings are expected).
    with open(test_conf_file_path, "w") as f:
        f.write('# bitcoind\nBTC_RPC_USER = 0000\nBTC_RPC_PASSWD = "password"\nBTC_RPC_HOST = 000')

    try:
        import test.pisa.unit.test_conf as conf

        # Reload in case a previous test already imported this module path.
        importlib.reload(conf)

        # The unused `conf_dict =` assignment from the original is dropped.
        with pytest.raises(Exception):
            load_config(conf)

    finally:
        # Always remove the temporary file, even if load_config unexpectedly
        # succeeds and pytest.raises fails the test.
        os.remove(test_conf_file_path)
def test_empty_load_config():
    """An empty config module must make load_config raise."""
    # Create the empty file ("a" creates without truncating, as in the
    # original). The original called open() without closing it, leaking the
    # file handle; a with-block fixes that.
    with open(test_conf_file_path, "a"):
        pass

    try:
        import test.pisa.unit.test_conf as conf

        # Reload in case a previous test already imported this module path.
        importlib.reload(conf)

        with pytest.raises(Exception):
            load_config(conf)

    finally:
        # Always remove the temporary file, even on an unexpected pass.
        os.remove(test_conf_file_path)