diff --git a/apps/cli/__init__.py b/apps/cli/__init__.py index 1ccaf9f..2c9e149 100644 --- a/apps/cli/__init__.py +++ b/apps/cli/__init__.py @@ -1,5 +1,4 @@ import logging -from apps.cli.logger import Logger # PISA-SERVER DEFAULT_PISA_API_SERVER = "btc.pisa.watch" @@ -13,9 +12,22 @@ CLI_PUBLIC_KEY = "cli_pk.der" CLI_PRIVATE_KEY = "cli_sk.der" PISA_PUBLIC_KEY = "pisa_pk.der" -# Configure logging -logging.basicConfig( - format="%(message)s", level=logging.INFO, handlers=[logging.FileHandler(CLIENT_LOG_FILE), logging.StreamHandler()] -) +# Create the file logger +f_logger = logging.getLogger("cli_file_log") +f_logger.setLevel(logging.INFO) -logger = Logger("Client") +fh = logging.FileHandler(CLIENT_LOG_FILE) +fh.setLevel(logging.INFO) +fh_formatter = logging.Formatter("%(message)s") +fh.setFormatter(fh_formatter) +f_logger.addHandler(fh) + +# Create the console logger +c_logger = logging.getLogger("cli_console_log") +c_logger.setLevel(logging.INFO) + +ch = logging.StreamHandler() +ch.setLevel(logging.INFO) +ch_formatter = logging.Formatter("%(asctime)s %(message)s.", "%Y-%m-%d %H:%M:%S") +ch.setFormatter(ch_formatter) +c_logger.addHandler(ch) diff --git a/apps/cli/blob.py b/apps/cli/blob.py index 0d26a48..d5c7c14 100644 --- a/apps/cli/blob.py +++ b/apps/cli/blob.py @@ -1,9 +1,4 @@ import re -from hashlib import sha256 -from binascii import hexlify, unhexlify -from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305 - -from apps.cli import logger class Blob: diff --git a/apps/cli/logger.py b/apps/cli/logger.py deleted file mode 100644 index 1b3f4d3..0000000 --- a/apps/cli/logger.py +++ /dev/null @@ -1,33 +0,0 @@ -import logging -import json -import time - - -class StructuredMessage(object): - def __init__(self, message, **kwargs): - self.message = message - self.time = time.asctime() - self.kwargs = kwargs - - def __str__(self): - return json.dumps({**self.kwargs, "message": self.message, "time": self.time}) - - -class Logger(object): - def __init__(self, actor=None): - self.actor = actor - - def _add_prefix(self, msg): - return msg if self.actor is None else "[{}] {}".format(self.actor, msg) - - def info(self, msg, **kwargs): - logging.info(StructuredMessage(self._add_prefix(msg), actor=self.actor, **kwargs)) - - def debug(self, msg, **kwargs): - logging.debug(StructuredMessage(self._add_prefix(msg), actor=self.actor, **kwargs)) - - def error(self, msg, **kwargs): - logging.error(StructuredMessage(self._add_prefix(msg), actor=self.actor, **kwargs)) - - def warning(self, msg, **kwargs): - logging.warning(StructuredMessage(self._add_prefix(msg), actor=self.actor, **kwargs)) diff --git a/apps/cli/pisa_cli.py b/apps/cli/pisa_cli.py index 8bd0335..c793b96 100644 --- a/apps/cli/pisa_cli.py +++ b/apps/cli/pisa_cli.py @@ -3,6 +3,7 @@ import sys import json import requests import time +import binascii from sys import argv from getopt import getopt, GetoptError from requests import ConnectTimeout, ConnectionError @@ -17,15 +18,16 @@ from apps.cli import ( CLI_PRIVATE_KEY, PISA_PUBLIC_KEY, APPOINTMENTS_FOLDER_NAME, - logger, ) +from common.logger import Logger from common.constants import LOCATOR_LEN_HEX from common.cryptographer import Cryptographer from common.tools import check_sha256_hex_format HTTP_OK = 200 +logger = Logger("Client") # FIXME: TESTING ENDPOINT, WON'T BE THERE IN PRODUCTION @@ -83,7 +85,7 @@ def save_signed_appointment(appointment, signature): def add_appointment(args): appointment_data = None - use_help = "Use 'help add_appointment' for help of how to use the 
command." + use_help = "Use 'help add_appointment' for help of how to use the command" if not args: logger.error("No appointment data provided. " + use_help) @@ -98,7 +100,7 @@ def add_appointment(args): if arg_opt in ["-f", "--file"]: fin = args.pop(0) if not os.path.isfile(fin): - logger.error("Can't find file " + fin) + logger.error("Can't find file", filename=fin) return False try: @@ -106,7 +108,7 @@ def add_appointment(args): appointment_data = json.load(f) except IOError as e: - logger.error("I/O error({}): {}".format(e.errno, e.strerror)) + logger.error("I/O error", errno=e.errno, error=e.strerror) return False else: appointment_data = json.loads(arg_opt) @@ -116,13 +118,13 @@ def add_appointment(args): return False if not appointment_data: - logger.error("The provided JSON is empty.") + logger.error("The provided JSON is empty") return False valid_locator = check_sha256_hex_format(appointment_data.get("tx_id")) if not valid_locator: - logger.error("The provided locator is not valid.") + logger.error("The provided locator is not valid") return False add_appointment_endpoint = "http://{}:{}".format(pisa_api_server, pisa_api_port) @@ -139,31 +141,34 @@ def add_appointment(args): cli_sk = Cryptographer.load_private_key_der(sk_der) except ValueError: - logger.error("Failed to deserialize the public key. It might be in an unsupported format.") + logger.error("Failed to deserialize the public key. It might be in an unsupported format") return False except FileNotFoundError: - logger.error("Client's private key file not found. Please check your settings.") + logger.error("Client's private key file not found. Please check your settings") return False except IOError as e: - logger.error("I/O error({}): {}".format(e.errno, e.strerror)) + logger.error("I/O error", errno=e.errno, error=e.strerror) return False signature = Cryptographer.sign(Cryptographer.signature_format(appointment), cli_sk) try: cli_pk_der = load_key_file_data(CLI_PUBLIC_KEY) + hex_pk_der = binascii.hexlify(cli_pk_der) except FileNotFoundError: - logger.error("Client's public key file not found. Please check your settings.") + logger.error("Client's public key file not found. Please check your settings") return False except IOError as e: - logger.error("I/O error({}): {}".format(e.errno, e.strerror)) + logger.error("I/O error", errno=e.errno, error=e.strerror) return False - data = {"appointment": appointment, "signature": signature, "public_key": cli_pk_der.decode("utf-8")} + # FIXME: Exceptions for hexlify need to be covered + + data = {"appointment": appointment, "signature": signature, "public_key": hex_pk_der.decode("utf-8")} appointment_json = json.dumps(data, sort_keys=True, separators=(",", ":")) @@ -175,29 +180,31 @@ def add_appointment(args): response_json = r.json() except json.JSONDecodeError: - logger.error("The response was not valid JSON.") + logger.error("The response was not valid JSON") return False except ConnectTimeout: - logger.error("Can't connect to pisa API. Connection timeout.") + logger.error("Can't connect to pisa API. Connection timeout") return False except ConnectionError: - logger.error("Can't connect to pisa API. Server cannot be reached.") + logger.error("Can't connect to pisa API. 
Server cannot be reached") return False if r.status_code != HTTP_OK: if "error" not in response_json: - logger.error("The server returned status code {}, but no error description.".format(r.status_code)) + logger.error("The server returned an error status code but no error description", status_code=r.status_code) else: error = response_json["error"] logger.error( - "The server returned status code {}, and the following error: {}.".format(r.status_code, error) + "The server returned an error status code with an error description", + status_code=r.status_code, + description=error, ) return False if "signature" not in response_json: - logger.error("The response does not contain the signature of the appointment.") + logger.error("The response does not contain the signature of the appointment") return False signature = response_json["signature"] @@ -207,30 +214,30 @@ def add_appointment(args): pisa_pk = Cryptographer.load_public_key_der(pisa_pk_der) if pisa_pk is None: - logger.error("Failed to deserialize the public key. It might be in an unsupported format.") + logger.error("Failed to deserialize the public key. It might be in an unsupported format") return False is_sig_valid = Cryptographer.verify(Cryptographer.signature_format(appointment), signature, pisa_pk) except FileNotFoundError: - logger.error("Pisa's public key file not found. Please check your settings.") + logger.error("Pisa's public key file not found. Please check your settings") return False except IOError as e: - logger.error("I/O error({}): {}".format(e.errno, e.strerror)) + logger.error("I/O error", errno=e.errno, error=e.strerror) return False if not is_sig_valid: - logger.error("The returned appointment's signature is invalid.") + logger.error("The returned appointment's signature is invalid") return False - logger.info("Appointment accepted and signed by Pisa.") + logger.info("Appointment accepted and signed by Pisa") # all good, store appointment and signature try: save_signed_appointment(appointment, signature) except OSError as e: - logger.error("There was an error while saving the appointment: {}".format(e)) + logger.error("There was an error while saving the appointment", error=e) return False return True @@ -238,7 +245,7 @@ def add_appointment(args): def get_appointment(args): if not args: - logger.error("No arguments were given.") + logger.error("No arguments were given") return False arg_opt = args.pop(0) @@ -250,7 +257,7 @@ def get_appointment(args): valid_locator = check_sha256_hex_format(locator) if not valid_locator: - logger.error("The provided locator is not valid: {}".format(locator)) + logger.error("The provided locator is not valid", locator=locator) return False get_appointment_endpoint = "http://{}:{}/get_appointment".format(pisa_api_server, pisa_api_port) @@ -261,11 +268,11 @@ def get_appointment(args): print(json.dumps(r.json(), indent=4, sort_keys=True)) except ConnectTimeout: - logger.error("Can't connect to pisa API. Connection timeout.") + logger.error("Can't connect to pisa API. Connection timeout") return False except ConnectionError: - logger.error("Can't connect to pisa API. Server cannot be reached.") + logger.error("Can't connect to pisa API. Server cannot be reached") return False return True @@ -362,10 +369,10 @@ if __name__ == "__main__": logger.error("Unknown command. Use help to check the list of available commands") else: - logger.error("No command provided. Use help to check the list of available commands.") + logger.error("No command provided. 
Use help to check the list of available commands") except GetoptError as e: logger.error("{}".format(e)) except json.JSONDecodeError as e: - logger.error("Non-JSON encoded appointment passed as parameter.") + logger.error("Non-JSON encoded appointment passed as parameter") diff --git a/common/cryptographer.py b/common/cryptographer.py index 93fc72a..305da11 100644 --- a/common/cryptographer.py +++ b/common/cryptographer.py @@ -11,7 +11,7 @@ from cryptography.hazmat.primitives.serialization import load_der_public_key, lo from cryptography.exceptions import InvalidSignature from common.tools import check_sha256_hex_format -from pisa.logger import Logger +from common.logger import Logger logger = Logger("Cryptographer") @@ -39,12 +39,12 @@ class Cryptographer: """ if len(data) % 2: - error = "Incorrect (Odd-length) value." + error = "Incorrect (Odd-length) value" logger.error(error, data=data) raise ValueError(error) if not check_sha256_hex_format(secret): - error = "Secret must be a 32-byte hex value (64 hex chars)." + error = "Secret must be a 32-byte hex value (64 hex chars)" logger.error(error, secret=secret) raise ValueError(error) @@ -82,7 +82,7 @@ class Cryptographer: sk = sha256(unhexlify(secret)).digest() nonce = bytearray(12) - logger.info("Encrypting blob.", sk=hexlify(sk).decode(), nonce=hexlify(nonce).decode(), blob=blob.data) + logger.info("Encrypting blob", sk=hexlify(sk).decode(), nonce=hexlify(nonce).decode(), blob=blob.data) # Encrypt the data cipher = ChaCha20Poly1305(sk) @@ -124,7 +124,7 @@ class Cryptographer: nonce = bytearray(12) logger.info( - "Decrypting Blob.", + "Decrypting Blob", sk=hexlify(sk).decode(), nonce=hexlify(nonce).decode(), encrypted_blob=encrypted_blob.data, @@ -185,13 +185,13 @@ class Cryptographer: return pk except UnsupportedAlgorithm: - logger.error("Could not deserialize the public key (unsupported algorithm).") + logger.error("Could not deserialize the public key (unsupported algorithm)") except ValueError: - logger.error("The provided data cannot be deserialized (wrong size or format).") + logger.error("The provided data cannot be deserialized (wrong size or format)") except TypeError: - logger.error("The provided data cannot be deserialized (wrong type).") + logger.error("The provided data cannot be deserialized (wrong type)") return None @@ -220,10 +220,10 @@ class Cryptographer: raise ValueError("Could not deserialize the private key (unsupported algorithm).") except ValueError: - logger.error("The provided data cannot be deserialized (wrong size or format).") + logger.error("The provided data cannot be deserialized (wrong size or format)") except TypeError: - logger.error("The provided data cannot be deserialized (wrong type).") + logger.error("The provided data cannot be deserialized (wrong type)") @staticmethod def sign(data, sk, rtype="str"): @@ -246,7 +246,7 @@ class Cryptographer: raise ValueError("Wrong return type. 
Return type must be 'str' or 'bytes'") if not isinstance(sk, ec.EllipticCurvePrivateKey): - logger.error("The value passed as sk is not a private key (EllipticCurvePrivateKey).") + logger.error("The value passed as sk is not a private key (EllipticCurvePrivateKey)") return None else: @@ -274,7 +274,7 @@ class Cryptographer: """ if not isinstance(pk, ec.EllipticCurvePublicKey): - logger.error("The value passed as pk is not a public key (EllipticCurvePublicKey).") + logger.error("The value passed as pk is not a public key (EllipticCurvePublicKey)") return False if isinstance(signature, str): diff --git a/pisa/logger.py b/common/logger.py similarity index 72% rename from pisa/logger.py rename to common/logger.py index 237eafd..ac683c2 100644 --- a/pisa/logger.py +++ b/common/logger.py @@ -1,5 +1,5 @@ -import time import json +from datetime import datetime from pisa import f_logger, c_logger @@ -7,10 +7,10 @@ from pisa import f_logger, c_logger class _StructuredMessage: def __init__(self, message, **kwargs): self.message = message - self.time = time.asctime() + self.time = datetime.now().strftime("%d/%m/%Y %H:%M:%S") self.kwargs = kwargs - def __str__(self): + def to_dict(self): return {**self.kwargs, "message": self.message, "time": self.time} @@ -26,13 +26,24 @@ class Logger: self.actor = actor def _add_prefix(self, msg): - return msg if self.actor is None else "[{}] {}".format(self.actor, msg) + return msg if self.actor is None else "[{}]: {}".format(self.actor, msg) def _create_console_message(self, msg, **kwargs): - return _StructuredMessage(self._add_prefix(msg), actor=self.actor, **kwargs).message + s_message = _StructuredMessage(self._add_prefix(msg), **kwargs).to_dict() + message = "{} {}".format(s_message["time"], s_message["message"]) - def _create_file_message(self, msg, **kwargs): - return json.dumps(_StructuredMessage(msg, actor=self.actor, **kwargs).__str__()) + # s_message will always have at least two items (message and time). + if len(s_message) > 2: + params = "".join("{}={}, ".format(k, v) for k, v in s_message.items() if k not in ["message", "time"]) + + # Remove the extra 2 characters (space and comma) and add all data to the final message. 
+ message += " ({})".format(params[:-2]) + + return message + + @staticmethod + def _create_file_message(msg, **kwargs): + return json.dumps(_StructuredMessage(msg, **kwargs).to_dict()) def info(self, msg, **kwargs): """ diff --git a/pisa/__init__.py b/pisa/__init__.py index f273b19..dd06913 100644 --- a/pisa/__init__.py +++ b/pisa/__init__.py @@ -22,6 +22,6 @@ c_logger.setLevel(logging.INFO) ch = logging.StreamHandler() ch.setLevel(logging.INFO) -ch_formatter = logging.Formatter("%(asctime)s %(message)s", "%Y-%m-%d %H:%M:%S") +ch_formatter = logging.Formatter("%(message)s.", "%Y-%m-%d %H:%M:%S") ch.setFormatter(ch_formatter) c_logger.addHandler(ch) diff --git a/pisa/api.py b/pisa/api.py index b2d91db..2659e94 100644 --- a/pisa/api.py +++ b/pisa/api.py @@ -3,7 +3,7 @@ import json from flask import Flask, request, abort, jsonify from pisa import HOST, PORT, logging -from pisa.logger import Logger +from common.logger import Logger from pisa.inspector import Inspector from pisa.appointment import Appointment from pisa.block_processor import BlockProcessor diff --git a/pisa/block_processor.py b/pisa/block_processor.py index e7b93d9..9eb0ee2 100644 --- a/pisa/block_processor.py +++ b/pisa/block_processor.py @@ -1,4 +1,4 @@ -from pisa.logger import Logger +from common.logger import Logger from pisa.tools import bitcoin_cli from pisa.utils.auth_proxy import JSONRPCException @@ -30,7 +30,7 @@ class BlockProcessor: except JSONRPCException as e: block = None - logger.error("Couldn't get block from bitcoind.", error=e.error) + logger.error("Couldn't get block from bitcoind", error=e.error) return block @@ -50,7 +50,7 @@ class BlockProcessor: except JSONRPCException as e: block_hash = None - logger.error("Couldn't get block hash.", error=e.error) + logger.error("Couldn't get block hash", error=e.error) return block_hash @@ -94,7 +94,7 @@ class BlockProcessor: except JSONRPCException as e: tx = None - logger.error("Can't build transaction from decoded data.", error=e.error) + logger.error("Can't build transaction from decoded data", error=e.error) return tx diff --git a/pisa/carrier.py b/pisa/carrier.py index a83f234..d4029d4 100644 --- a/pisa/carrier.py +++ b/pisa/carrier.py @@ -1,5 +1,5 @@ from pisa.rpc_errors import * -from pisa.logger import Logger +from common.logger import Logger from pisa.tools import bitcoin_cli from pisa.utils.auth_proxy import JSONRPCException from pisa.errors import UNKNOWN_JSON_RPC_EXCEPTION, RPC_TX_REORGED_AFTER_BROADCAST @@ -97,7 +97,7 @@ class Carrier: else: # If something else happens (unlikely but possible) log it so we can treat it in future releases - logger.error("JSONRPCException.", method="Carrier.send_transaction", error=e.error) + logger.error("JSONRPCException", method="Carrier.send_transaction", error=e.error) receipt = Receipt(delivered=False, reason=UNKNOWN_JSON_RPC_EXCEPTION) return receipt @@ -129,6 +129,6 @@ class Carrier: else: # If something else happens (unlikely but possible) log it so we can treat it in future releases - logger.error("JSONRPCException.", method="Carrier.get_transaction", error=e.error) + logger.error("JSONRPCException", method="Carrier.get_transaction", error=e.error) return tx_info diff --git a/pisa/cleaner.py b/pisa/cleaner.py index c5996ca..a00cc22 100644 --- a/pisa/cleaner.py +++ b/pisa/cleaner.py @@ -1,4 +1,4 @@ -from pisa.logger import Logger +from common.logger import Logger logger = Logger("Cleaner") @@ -37,7 +37,7 @@ class Cleaner: else: locator_uuid_map[locator].remove(uuid) - logger.info("End time reached with no breach. 
Deleting appointment.", locator=locator, uuid=uuid) + logger.info("End time reached with no breach. Deleting appointment", locator=locator, uuid=uuid) # Delete appointment from the db db_manager.delete_watcher_appointment(uuid) @@ -92,7 +92,7 @@ class Cleaner: for uuid, confirmations in completed_trackers: logger.info( - "Appointment completed. Appointment ended after reaching enough confirmations.", + "Appointment completed. Appointment ended after reaching enough confirmations", uuid=uuid, height=height, confirmations=confirmations, @@ -105,7 +105,7 @@ class Cleaner: if len(tx_tracker_map[penalty_txid]) == 1: tx_tracker_map.pop(penalty_txid) - logger.info("No more trackers for penalty transaction.", penalty_txid=penalty_txid) + logger.info("No more trackers for penalty transaction", penalty_txid=penalty_txid) else: tx_tracker_map[penalty_txid].remove(uuid) @@ -126,7 +126,7 @@ class Cleaner: db_manager.store_update_locator_map(locator, locator_map) else: - logger.error("UUID not found in the db.", uuid=uuid) + logger.error("UUID not found in the db", uuid=uuid) else: - logger.error("Locator not found in the db.", uuid=uuid) + logger.error("Locator not found in the db", uuid=uuid) diff --git a/pisa/db_manager.py b/pisa/db_manager.py index 4d654f4..d9c566c 100644 --- a/pisa/db_manager.py +++ b/pisa/db_manager.py @@ -1,7 +1,7 @@ import json import plyvel -from pisa.logger import Logger +from common.logger import Logger logger = Logger("DBManager") diff --git a/pisa/inspector.py b/pisa/inspector.py index 323928d..4de7fe6 100644 --- a/pisa/inspector.py +++ b/pisa/inspector.py @@ -6,7 +6,7 @@ from common.cryptographer import Cryptographer from pisa import errors import pisa.conf as conf -from pisa.logger import Logger +from common.logger import Logger from pisa.appointment import Appointment from pisa.block_processor import BlockProcessor diff --git a/pisa/pisad.py b/pisa/pisad.py index 0e8160b..ecf9a6f 100644 --- a/pisa/pisad.py +++ b/pisa/pisad.py @@ -3,7 +3,7 @@ from sys import argv, exit from signal import signal, SIGINT, SIGQUIT, SIGTERM from pisa.conf import DB_PATH -from pisa.logger import Logger +from common.logger import Logger from pisa.api import start_api from pisa.watcher import Watcher from pisa.builder import Builder diff --git a/pisa/responder.py b/pisa/responder.py index 6288061..0fce1cb 100644 --- a/pisa/responder.py +++ b/pisa/responder.py @@ -2,7 +2,7 @@ import json from queue import Queue from threading import Thread -from pisa.logger import Logger +from common.logger import Logger from pisa.cleaner import Cleaner from pisa.carrier import Carrier from pisa.block_processor import BlockProcessor @@ -211,7 +211,7 @@ class Responder: # TODO: Add the missing reasons (e.g. 
RPC_VERIFY_REJECTED) # TODO: Use self.on_sync(block_hash) to check whether or not we failed because we are out of sync logger.warning( - "Tracker cannot be created.", reason=receipt.reason, uuid=uuid, on_sync=self.on_sync(block_hash) + "Tracker cannot be created", reason=receipt.reason, uuid=uuid, on_sync=self.on_sync(block_hash) ) pass @@ -255,7 +255,7 @@ class Responder: self.db_manager.store_responder_tracker(uuid, tracker.to_json()) logger.info( - "New tracker added.", dispute_txid=dispute_txid, penalty_txid=penalty_txid, appointment_end=appointment_end + "New tracker added", dispute_txid=dispute_txid, penalty_txid=penalty_txid, appointment_end=appointment_end ) if self.asleep: @@ -436,7 +436,7 @@ class Responder: for uuid in self.tx_tracker_map[txid]: tracker = self.trackers[uuid] logger.warning( - "Transaction has missed many confirmations. Rebroadcasting.", penalty_txid=tracker.penalty_txid + "Transaction has missed many confirmations. Rebroadcasting", penalty_txid=tracker.penalty_txid ) receipt = carrier.send_transaction(tracker.penalty_rawtx, tracker.penalty_txid) @@ -444,7 +444,7 @@ class Responder: if not receipt.delivered: # FIXME: Can this actually happen? - logger.warning("Transaction failed.", penalty_txid=tracker.penalty_txid) + logger.warning("Transaction failed", penalty_txid=tracker.penalty_txid) return receipts @@ -475,7 +475,7 @@ class Responder: self.unconfirmed_txs.append(tracker.penalty_txid) logger.info( - "Penalty transaction back in mempool. Updating unconfirmed transactions.", + "Penalty transaction back in mempool. Updating unconfirmed transactions", penalty_txid=tracker.penalty_txid, ) @@ -502,5 +502,5 @@ class Responder: # ToDo: #24-properly-handle-reorgs # FIXME: if the dispute is not on chain (either in mempool or not there at all), we need to call the # reorg manager - logger.warning("Dispute and penalty transaction missing. Calling the reorg manager.") - logger.error("Reorg manager not yet implemented.") + logger.warning("Dispute and penalty transaction missing. 
Calling the reorg manager") + logger.error("Reorg manager not yet implemented") diff --git a/pisa/utils/zmq_subscriber.py b/pisa/utils/zmq_subscriber.py index 95c5996..f513146 100644 --- a/pisa/utils/zmq_subscriber.py +++ b/pisa/utils/zmq_subscriber.py @@ -1,6 +1,6 @@ import zmq import binascii -from pisa.logger import Logger +from common.logger import Logger from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT diff --git a/pisa/watcher.py b/pisa/watcher.py index 25c7db8..5079050 100644 --- a/pisa/watcher.py +++ b/pisa/watcher.py @@ -5,7 +5,7 @@ from threading import Thread from common.cryptographer import Cryptographer from common.constants import LOCATOR_LEN_HEX -from pisa.logger import Logger +from common.logger import Logger from pisa.cleaner import Cleaner from pisa.responder import Responder from pisa.block_processor import BlockProcessor @@ -139,7 +139,7 @@ class Watcher: appointment_added = True - logger.info("New appointment accepted.", locator=appointment.locator) + logger.info("New appointment accepted", locator=appointment.locator) signature = Cryptographer.sign(Cryptographer.signature_format(appointment.to_dict()), self.signing_key) @@ -147,7 +147,7 @@ class Watcher: appointment_added = False signature = None - logger.info("Maximum appointments reached, appointment rejected.", locator=appointment.locator) + logger.info("Maximum appointments reached, appointment rejected", locator=appointment.locator) return appointment_added, signature @@ -177,7 +177,7 @@ class Watcher: if block is not None: txids = block.get("tx") - logger.info("List of transactions.", txids=txids) + logger.info("List of transactions", txids=txids) expired_appointments = [ uuid @@ -195,7 +195,7 @@ class Watcher: # Errors decrypting the Blob will result in a None penalty_txid if filtered_breach["valid_breach"] is True: logger.info( - "Notifying responder and deleting appointment.", + "Notifying responder and deleting appointment", penalty_txid=filtered_breach["penalty_txid"], locator=filtered_breach["locator"], uuid=uuid, @@ -287,7 +287,7 @@ class Watcher: penalty_txid = penalty_tx.get("txid") valid_breach = True - logger.info("Breach found for locator.", locator=locator, uuid=uuid, penalty_txid=penalty_txid) + logger.info("Breach found for locator", locator=locator, uuid=uuid, penalty_txid=penalty_txid) else: penalty_txid = None
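For reference, a minimal usage sketch of the relocated common.logger.Logger (assuming the f_logger/c_logger handler setup from pisa/__init__.py above); the locator value and the rendered output shown in the comments are illustrative approximations, not taken from the codebase:

# Minimal sketch, assuming the handlers configured in pisa/__init__.py:
# f_logger writes one JSON object per entry to the log file, while c_logger
# prints a human-readable line. The Logger itself prepends a timestamp in
# _create_console_message, and %(asctime)s was dropped from pisa's console
# formatter accordingly.
from common.logger import Logger

logger = Logger("Watcher")

# Structured fields are now passed as kwargs instead of being formatted into
# the message string; trailing periods were removed because the console
# formatter ("%(message)s.") already appends one.
logger.info("New appointment accepted", locator="a" * 64)  # hypothetical locator

# Console output, roughly:
#   06/12/2019 10:15:00 [Watcher]: New appointment accepted (locator=aaaa...).
# File output, roughly:
#   {"locator": "aaaa...", "message": "New appointment accepted", "time": "06/12/2019 10:15:00"}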