Merge pull request #85 from sr-gi/improve_logging

Fixes logging issues
Author: Sergi Delgado Segura
Date: 2020-01-23 18:23:44 +01:00
Committed via GitHub
11 changed files with 64 additions and 22 deletions

View File

@@ -1,8 +1,7 @@
 import json
+import logging
 from datetime import datetime
-
-from pisa import f_logger, c_logger


 class _StructuredMessage:
     def __init__(self, message, **kwargs):
@@ -22,8 +21,10 @@ class Logger:
         actor (:obj:`str`): the system actor that is logging the event (e.g. ``Watcher``, ``Cryptographer``, ...).
     """

-    def __init__(self, actor=None):
+    def __init__(self, log_name_prefix, actor=None):
         self.actor = actor
+        self.f_logger = logging.getLogger("{}_file_log".format(log_name_prefix))
+        self.c_logger = logging.getLogger("{}_console_log".format(log_name_prefix))

     def _add_prefix(self, msg):
         return msg if self.actor is None else "[{}]: {}".format(self.actor, msg)
@@ -54,8 +55,8 @@ class Logger:
             kwargs: a ``key:value`` collection parameters to be added to the output.
         """

-        f_logger.info(self._create_file_message(msg, **kwargs))
-        c_logger.info(self._create_console_message(msg, **kwargs))
+        self.f_logger.info(self._create_file_message(msg, **kwargs))
+        self.c_logger.info(self._create_console_message(msg, **kwargs))

     def debug(self, msg, **kwargs):
         """
@@ -66,8 +67,8 @@ class Logger:
             kwargs: a ``key:value`` collection parameters to be added to the output.
         """

-        f_logger.debug(self._create_file_message(msg, **kwargs))
-        c_logger.debug(self._create_console_message(msg, **kwargs))
+        self.f_logger.debug(self._create_file_message(msg, **kwargs))
+        self.c_logger.debug(self._create_console_message(msg, **kwargs))

     def error(self, msg, **kwargs):
         """
@@ -78,8 +79,8 @@ class Logger:
             kwargs: a ``key:value`` collection parameters to be added to the output.
         """

-        f_logger.error(self._create_file_message(msg, **kwargs))
-        c_logger.error(self._create_console_message(msg, **kwargs))
+        self.f_logger.error(self._create_file_message(msg, **kwargs))
+        self.c_logger.error(self._create_console_message(msg, **kwargs))

     def warning(self, msg, **kwargs):
         """
@@ -90,5 +91,5 @@ class Logger:
             kwargs: a ``key:value`` collection parameters to be added to the output.
         """

-        f_logger.warning(self._create_file_message(msg, **kwargs))
-        c_logger.warning(self._create_console_message(msg, **kwargs))
+        self.f_logger.warning(self._create_file_message(msg, **kwargs))
+        self.c_logger.warning(self._create_console_message(msg, **kwargs))
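
Note: after this change a Logger instance no longer writes through the global f_logger / c_logger imported from pisa; it looks up its own pair of named loggers, which removes the dependency of common.logger on the pisa package. A minimal sketch of the new call pattern (the prefix and the logged values are made-up, for illustration only):

from common.logger import Logger

# Resolves to the loggers named "pisa_file_log" and "pisa_console_log";
# setup_logging() is expected to have attached handlers to them beforehand.
logger = Logger(log_name_prefix="pisa", actor="Watcher")

logger.info("New block received", block_hash="00000000deadbeef")  # illustrative values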

View File

@@ -1,5 +1,6 @@
 import re
 import os
+import logging

 from common.constants import LOCATOR_LEN_HEX
@@ -69,3 +70,32 @@ def extend_paths(base_path, config_fields):
             config_fields[key]["value"] = base_path + config_fields[key]["value"]

     return config_fields
+
+
+def setup_logging(log_file_path, log_name_prefix):
+    if not isinstance(log_file_path, str):
+        print(log_file_path)
+        raise ValueError("Wrong log file path.")
+
+    if not isinstance(log_name_prefix, str):
+        raise ValueError("Wrong log file name.")
+
+    # Create the file logger
+    f_logger = logging.getLogger("{}_file_log".format(log_name_prefix))
+    f_logger.setLevel(logging.INFO)
+
+    fh = logging.FileHandler(log_file_path)
+    fh.setLevel(logging.INFO)
+    fh_formatter = logging.Formatter("%(message)s")
+    fh.setFormatter(fh_formatter)
+    f_logger.addHandler(fh)
+
+    # Create the console logger
+    c_logger = logging.getLogger("{}_console_log".format(log_name_prefix))
+    c_logger.setLevel(logging.INFO)
+
+    ch = logging.StreamHandler()
+    ch.setLevel(logging.INFO)
+    ch_formatter = logging.Formatter("%(message)s.", "%Y-%m-%d %H:%M:%S")
+    ch.setFormatter(ch_formatter)
+    c_logger.addHandler(ch)
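
As a usage sketch: the intended flow appears to be to configure the two named loggers once at startup and then let every Logger attach to them by prefix. The import path of setup_logging is not shown in this diff, and the file name and prefix below are made-up values:

from common.logger import Logger
from common.tools import setup_logging  # assumed path; the module name is not visible in this diff

LOG_PREFIX = "pisa"                     # illustrative; the daemon passes its real LOG_PREFIX

setup_logging("pisa.log", LOG_PREFIX)   # configures "pisa_file_log" and "pisa_console_log"

logger = Logger(actor="Watcher", log_name_prefix=LOG_PREFIX)
logger.info("Watcher started")          # written to pisa.log and echoed to the console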

View File

@@ -1,8 +1,9 @@
 import os
 import json
+import logging

 from flask import Flask, request, abort, jsonify
-from pisa import HOST, PORT, logging
+from pisa import HOST, PORT, LOG_PREFIX
 from common.logger import Logger
 from pisa.inspector import Inspector
 from common.appointment import Appointment
@@ -13,7 +14,7 @@ from common.constants import HTTP_OK, HTTP_BAD_REQUEST, HTTP_SERVICE_UNAVAILABLE
 # ToDo: #5-add-async-to-api
 app = Flask(__name__)

-logger = Logger("API")
+logger = Logger(actor="API", log_name_prefix=LOG_PREFIX)


 class API:
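
The remaining modules below apply the same pattern: a module-level logger built from the shared LOG_PREFIX plus the module's actor name. Since logging.getLogger returns the same object for a given name, every Logger created with the same prefix writes through the handlers installed once by setup_logging. A small sketch of that property (the "pisa" prefix is illustrative):

import logging

from common.logger import Logger

a = Logger(actor="API", log_name_prefix="pisa")
b = Logger(actor="Watcher", log_name_prefix="pisa")

# Both instances resolve to the very same named loggers, so handlers configured
# once by setup_logging() apply to every module that uses the same prefix.
assert a.f_logger is b.f_logger is logging.getLogger("pisa_file_log")
assert a.c_logger is b.c_logger is logging.getLogger("pisa_console_log")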

View File

@@ -1,8 +1,10 @@
 from common.logger import Logger
+from pisa import LOG_PREFIX
 from pisa.tools import bitcoin_cli
 from pisa.utils.auth_proxy import JSONRPCException

-logger = Logger("BlockProcessor")
+logger = Logger(actor="BlockProcessor", log_name_prefix=LOG_PREFIX)


 class BlockProcessor:

View File

@@ -1,10 +1,11 @@
+from pisa import LOG_PREFIX
 from pisa.rpc_errors import *
 from common.logger import Logger
 from pisa.tools import bitcoin_cli
 from pisa.utils.auth_proxy import JSONRPCException
 from pisa.errors import UNKNOWN_JSON_RPC_EXCEPTION, RPC_TX_REORGED_AFTER_BROADCAST

-logger = Logger("Carrier")
+logger = Logger(actor="Carrier", log_name_prefix=LOG_PREFIX)

 # FIXME: This class is not fully covered by unit tests

View File

@@ -2,11 +2,12 @@ import zmq
 import binascii
 from threading import Thread, Event, Condition

+from pisa import LOG_PREFIX
 from common.logger import Logger
 from pisa.conf import FEED_PROTOCOL, FEED_ADDR, FEED_PORT, POLLING_DELTA, BLOCK_WINDOW_SIZE
 from pisa.block_processor import BlockProcessor

-logger = Logger("ChainMonitor")
+logger = Logger(actor="ChainMonitor", log_name_prefix=LOG_PREFIX)


 class ChainMonitor:

View File

@@ -1,6 +1,8 @@
+from pisa import LOG_PREFIX
 from common.logger import Logger

-logger = Logger("Cleaner")
+logger = Logger(actor="Cleaner", log_name_prefix=LOG_PREFIX)


 class Cleaner:

View File

@@ -1,9 +1,11 @@
 import json
 import plyvel

+from pisa import LOG_PREFIX
 from common.logger import Logger

-logger = Logger("DBManager")
+logger = Logger(actor="DBManager", log_name_prefix=LOG_PREFIX)

 WATCHER_PREFIX = "w"
 WATCHER_LAST_BLOCK_KEY = "bw"

View File

@@ -4,12 +4,12 @@ from binascii import unhexlify
 from common.constants import LOCATOR_LEN_HEX
 from common.cryptographer import Cryptographer

-from pisa import errors
+from pisa import errors, LOG_PREFIX
 from common.logger import Logger
 from common.appointment import Appointment
 from pisa.block_processor import BlockProcessor

-logger = Logger("Inspector")
+logger = Logger(actor="Inspector", log_name_prefix=LOG_PREFIX)

 # FIXME: The inspector logs the wrong messages sent form the users. A possible attack surface would be to send a really
 # long field that, even if not accepted by PISA, would be stored in the logs. This is a possible DoS surface

View File

@@ -2,6 +2,7 @@ import json
 from queue import Queue
 from threading import Thread

+from pisa import LOG_PREFIX
 from common.logger import Logger
 from pisa.cleaner import Cleaner
 from pisa.carrier import Carrier
@@ -10,7 +11,7 @@ from pisa.block_processor import BlockProcessor
 CONFIRMATIONS_BEFORE_RETRY = 6
 MIN_CONFIRMATIONS = 6

-logger = Logger("Responder")
+logger = Logger(actor="Responder", log_name_prefix=LOG_PREFIX)


 class TransactionTracker:

View File

@@ -8,11 +8,12 @@ from common.tools import compute_locator
 from common.logger import Logger
+from pisa import LOG_PREFIX
 from pisa.cleaner import Cleaner
 from pisa.responder import Responder
 from pisa.block_processor import BlockProcessor

-logger = Logger("Watcher")
+logger = Logger(actor="Watcher", log_name_prefix=LOG_PREFIX)


 class Watcher: