feat: Implement LoggingConfig and structured logging

- Created a new `LoggingConfig` class to represent the logging configuration in the `Config` class.
- Created a new `LogFormatName` enum to represent the available log formats: 'simple', 'debug', and 'structured_google_cloud'.
- Modified `configure_logging` so it can be called with an unpacked `LoggingConfig` object, i.e. `configure_logging(**config.logging.dict())` (see the sketch after this list).
- Updated `configure_logging` to choose a default log format based on the log level: the debug format when the level is DEBUG, the simple format otherwise.
- Added a `StructuredLoggingFormatter` class to handle formatting for structured logs.
- Updated the imports and call sites of `configure_logging` and related symbols in the affected modules.
- Updated the `config` fixture in the unit tests to include the new logging configuration attributes.
- Added `--log-level`, `--log-format`, and `--log-file-format` parameters to the CLI.
- Reordered the CLI parameters.
- Removed the memory backend parameter (`--use-memory`/`-m`) from the CLI.
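
A minimal sketch of the new call pattern described above (module paths as shown in this diff; the `ConfigBuilder` import path and the environment setup are assumptions for illustration):

    import logging
    from autogpt.config import ConfigBuilder  # import path assumed
    from autogpt.logs.config import configure_logging

    config = ConfigBuilder.build_config_from_env()
    config.logging.level = logging.DEBUG           # tweak the LoggingConfig sub-config
    config.logging.plain_console_output = True
    configure_logging(**config.logging.dict())     # unpack the LoggingConfig into configure_logging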
Authored by Reinier van der Leer on 2023-11-30 16:20:18 +01:00
parent c65d6f7ff8, commit ae1e030824
12 changed files with 434 additions and 197 deletions

View File

@@ -1,4 +1,5 @@
import asyncio
import logging
import sys
from pathlib import Path
@@ -19,20 +20,16 @@ def run_specific_agent(task: str, continuous_mode: bool = False) -> None:
def bootstrap_agent(task: str, continuous_mode: bool) -> Agent:
config = ConfigBuilder.build_config_from_env()
config.debug_mode = False
config.logging.level = logging.DEBUG
config.logging.log_dir = LOG_DIR
config.logging.plain_console_output = True
configure_logging(**config.logging.dict())
config.continuous_mode = continuous_mode
config.continuous_limit = 20
config.temperature = 0
config.noninteractive_mode = True
config.plain_output = True
config.memory_backend = "no_memory"
configure_logging(
debug_mode=config.debug_mode,
plain_output=config.plain_output,
log_dir=LOG_DIR,
)
command_registry = CommandRegistry.with_command_modules(COMMAND_CATEGORIES, config)
ai_profile = AIProfile(

View File

@@ -1,9 +1,12 @@
"""Main script for the autogpt package."""
from logging import _nameToLevel as logLevelMap
from pathlib import Path
from typing import Optional
import click
from autogpt.logs.config import LogFormatName
@click.group(invoke_without_command=True)
@click.pass_context
@@ -15,6 +18,43 @@ def cli(ctx: click.Context):
@cli.command()
@click.option("-c", "--continuous", is_flag=True, help="Enable Continuous Mode")
@click.option(
"-l",
"--continuous-limit",
type=int,
help="Defines the number of times to run in continuous mode",
)
@click.option("--speak", is_flag=True, help="Enable Speak Mode")
@click.option("--gpt3only", is_flag=True, help="Enable GPT3.5 Only Mode")
@click.option("--gpt4only", is_flag=True, help="Enable GPT4 Only Mode")
@click.option(
"-b",
"--browser-name",
help="Specifies which web-browser to use when using selenium to scrape the web.",
)
@click.option(
"--allow-downloads",
is_flag=True,
help="Dangerous: Allows AutoGPT to download files natively.",
)
@click.option(
# TODO: this is a hidden option for now, necessary for integration testing.
# We should make this public once we're ready to roll out agent specific workspaces.
"--workspace-directory",
"-w",
type=click.Path(file_okay=False),
hidden=True,
)
@click.option(
"--install-plugin-deps",
is_flag=True,
help="Installs external dependencies for 3rd party plugins.",
)
@click.option(
"--skip-news",
is_flag=True,
help="Specifies whether to suppress the output of latest news on startup.",
)
@click.option(
"--skip-reprompt",
"-y",
@@ -30,57 +70,6 @@ def cli(ctx: click.Context):
" root directory. Will also automatically skip the re-prompt."
),
)
@click.option(
"--prompt-settings",
"-P",
type=click.Path(exists=True, dir_okay=False, path_type=Path),
help="Specifies which prompt_settings.yaml file to use.",
)
@click.option(
"-l",
"--continuous-limit",
type=int,
help="Defines the number of times to run in continuous mode",
)
@click.option("--speak", is_flag=True, help="Enable Speak Mode")
@click.option("--debug", is_flag=True, help="Enable Debug Mode")
@click.option("--gpt3only", is_flag=True, help="Enable GPT3.5 Only Mode")
@click.option("--gpt4only", is_flag=True, help="Enable GPT4 Only Mode")
@click.option(
"--use-memory",
"-m",
"memory_type",
type=str,
help="Defines which Memory backend to use",
)
@click.option(
"-b",
"--browser-name",
help="Specifies which web-browser to use when using selenium to scrape the web.",
)
@click.option(
"--allow-downloads",
is_flag=True,
help="Dangerous: Allows AutoGPT to download files natively.",
)
@click.option(
"--skip-news",
is_flag=True,
help="Specifies whether to suppress the output of latest news on startup.",
)
@click.option(
# TODO: this is a hidden option for now, necessary for integration testing.
# We should make this public once we're ready to roll out agent specific workspaces.
"--workspace-directory",
"-w",
type=click.Path(),
hidden=True,
)
@click.option(
"--install-plugin-deps",
is_flag=True,
help="Installs external dependencies for 3rd party plugins.",
)
@click.option(
"--ai-name",
type=str,
@@ -91,6 +80,12 @@ def cli(ctx: click.Context):
type=str,
help="AI role override",
)
@click.option(
"--prompt-settings",
"-P",
type=click.Path(exists=True, dir_okay=False, path_type=Path),
help="Specifies which prompt_settings.yaml file to use.",
)
@click.option(
"--constraint",
type=str,
@@ -126,28 +121,51 @@ def cli(ctx: click.Context):
" the AI's directives instead of being appended to them"
),
)
@click.option(
"--debug", is_flag=True, help="Implies --log-level=DEBUG --log-format=debug"
)
@click.option("--log-level", type=click.Choice([*logLevelMap.keys()]))
@click.option(
"--log-format",
help=(
"Choose a log format; defaults to 'simple'."
" Also implies --log-file-format, unless it is specified explicitly."
" Using the 'structured_google_cloud' format disables log file output."
),
type=click.Choice([i.value for i in LogFormatName]),
)
@click.option(
"--log-file-format",
help=(
"Override the format used for the log file output."
" Defaults to the application's global --log-format."
),
type=click.Choice([i.value for i in LogFormatName]),
)
def run(
continuous: bool,
continuous_limit: int,
ai_settings: Optional[Path],
prompt_settings: Optional[Path],
skip_reprompt: bool,
continuous_limit: Optional[int],
speak: bool,
debug: bool,
gpt3only: bool,
gpt4only: bool,
memory_type: str,
browser_name: str,
browser_name: Optional[str],
allow_downloads: bool,
skip_news: bool,
workspace_directory: str,
workspace_directory: Optional[Path],
install_plugin_deps: bool,
skip_news: bool,
skip_reprompt: bool,
ai_settings: Optional[Path],
ai_name: Optional[str],
ai_role: Optional[str],
prompt_settings: Optional[Path],
resource: tuple[str],
constraint: tuple[str],
best_practice: tuple[str],
override_directives: bool,
debug: bool,
log_level: Optional[str],
log_format: Optional[str],
log_file_format: Optional[str],
) -> None:
"""
Sets up and runs an agent, based on the task specified by the user, or resumes an
@@ -164,9 +182,11 @@ def run(
skip_reprompt=skip_reprompt,
speak=speak,
debug=debug,
log_level=log_level,
log_format=log_format,
log_file_format=log_file_format,
gpt3only=gpt3only,
gpt4only=gpt4only,
memory_type=memory_type,
browser_name=browser_name,
allow_downloads=allow_downloads,
skip_news=skip_news,
@@ -188,16 +208,8 @@ def run(
type=click.Path(exists=True, dir_okay=False, path_type=Path),
help="Specifies which prompt_settings.yaml file to use.",
)
@click.option("--debug", is_flag=True, help="Enable Debug Mode")
@click.option("--gpt3only", is_flag=True, help="Enable GPT3.5 Only Mode")
@click.option("--gpt4only", is_flag=True, help="Enable GPT4 Only Mode")
@click.option(
"--use-memory",
"-m",
"memory_type",
type=str,
help="Defines which Memory backend to use",
)
@click.option(
"-b",
"--browser-name",
@@ -213,15 +225,38 @@ def run(
is_flag=True,
help="Installs external dependencies for 3rd party plugins.",
)
@click.option(
"--debug", is_flag=True, help="Implies --log-level=DEBUG --log-format=debug"
)
@click.option("--log-level", type=click.Choice([*logLevelMap.keys()]))
@click.option(
"--log-format",
help=(
"Choose a log format; defaults to 'simple'."
" Also implies --log-file-format, unless it is specified explicitly."
" Using the 'structured_google_cloud' format disables log file output."
),
type=click.Choice([i.value for i in LogFormatName]),
)
@click.option(
"--log-file-format",
help=(
"Override the format used for the log file output."
" Defaults to the application's global --log-format."
),
type=click.Choice([i.value for i in LogFormatName]),
)
def serve(
prompt_settings: Optional[Path],
debug: bool,
gpt3only: bool,
gpt4only: bool,
memory_type: str,
browser_name: str,
browser_name: Optional[str],
allow_downloads: bool,
install_plugin_deps: bool,
debug: bool,
log_level: Optional[str],
log_format: Optional[str],
log_file_format: Optional[str],
) -> None:
"""
Starts an Agent Protocol compliant AutoGPT server, which creates a custom agent for
@@ -233,9 +268,11 @@ def serve(
run_auto_gpt_server(
prompt_settings=prompt_settings,
debug=debug,
log_level=log_level,
log_format=log_format,
log_file_format=log_file_format,
gpt3only=gpt3only,
gpt4only=gpt4only,
memory_type=memory_type,
browser_name=browser_name,
allow_downloads=allow_downloads,
install_plugin_deps=install_plugin_deps,
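
For reference, a small sketch of what the new click choices evaluate to, given the `LogFormatName` enum added further down in this diff (the `--log-level` choices come from the standard library's `logging._nameToLevel` map):

    from logging import _nameToLevel as logLevelMap
    from autogpt.logs.config import LogFormatName

    # --log-format / --log-file-format choices:
    assert [i.value for i in LogFormatName] == ["simple", "debug", "structured_google_cloud"]
    # --log-level choices (level names registered with the logging module), e.g.:
    # ['CRITICAL', 'FATAL', 'ERROR', 'WARN', 'WARNING', 'INFO', 'DEBUG', 'NOTSET']
    print([*logLevelMap.keys()])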

View File

@@ -12,6 +12,7 @@ from autogpt import utils
from autogpt.config import Config
from autogpt.config.config import GPT_3_MODEL, GPT_4_MODEL
from autogpt.llm.api_manager import ApiManager
from autogpt.logs.config import LogFormatName
from autogpt.logs.helpers import print_attribute, request_user_double_check
from autogpt.memory.vector import get_supported_memory_backends
@@ -27,10 +28,13 @@ def apply_overrides_to_config(
skip_reprompt: bool = False,
speak: bool = False,
debug: bool = False,
log_level: Optional[str] = None,
log_format: Optional[str] = None,
log_file_format: Optional[str] = None,
gpt3only: bool = False,
gpt4only: bool = False,
memory_type: str = "",
browser_name: str = "",
memory_type: Optional[str] = None,
browser_name: Optional[str] = None,
allow_downloads: bool = False,
skip_news: bool = False,
) -> None:
@@ -51,13 +55,20 @@ def apply_overrides_to_config(
allow_downloads (bool): Whether to allow AutoGPT to download files natively
skips_news (bool): Whether to suppress the output of latest news on startup
"""
config.debug_mode = False
config.continuous_mode = False
config.tts_config.speak_mode = False
# Set log level
if debug:
print_attribute("Debug mode", "ENABLED")
config.debug_mode = True
config.logging.level = logging.DEBUG
elif log_level and type(_level := logging.getLevelName(log_level.upper())) is int:
config.logging.level = _level
# Set log format
if log_format and log_format in LogFormatName._value2member_map_:
config.logging.log_format = LogFormatName(log_format)
if log_file_format and log_file_format in LogFormatName._value2member_map_:
config.logging.log_file_format = LogFormatName(log_file_format)
if continuous:
print_attribute("Continuous Mode", "ENABLED", title_color=Fore.YELLOW)

View File

@@ -51,23 +51,25 @@ from .utils import (
@coroutine
async def run_auto_gpt(
continuous: bool,
continuous_limit: int,
ai_settings: Optional[Path],
prompt_settings: Optional[Path],
skip_reprompt: bool,
speak: bool,
debug: bool,
gpt3only: bool,
gpt4only: bool,
memory_type: str,
browser_name: str,
allow_downloads: bool,
skip_news: bool,
workspace_directory: Path,
install_plugin_deps: bool,
override_ai_name: str = "",
override_ai_role: str = "",
continuous: bool = False,
continuous_limit: Optional[int] = None,
ai_settings: Optional[Path] = None,
prompt_settings: Optional[Path] = None,
skip_reprompt: bool = False,
speak: bool = False,
debug: bool = False,
log_level: Optional[str] = None,
log_format: Optional[str] = None,
log_file_format: Optional[str] = None,
gpt3only: bool = False,
gpt4only: bool = False,
browser_name: Optional[str] = None,
allow_downloads: bool = False,
skip_news: bool = False,
workspace_directory: Optional[Path] = None,
install_plugin_deps: bool = False,
override_ai_name: Optional[str] = None,
override_ai_role: Optional[str] = None,
resources: Optional[list[str]] = None,
constraints: Optional[list[str]] = None,
best_practices: Optional[list[str]] = None,
@@ -87,9 +89,11 @@ async def run_auto_gpt(
skip_reprompt=skip_reprompt,
speak=speak,
debug=debug,
log_level=log_level,
log_format=log_format,
log_file_format=log_file_format,
gpt3only=gpt3only,
gpt4only=gpt4only,
memory_type=memory_type,
browser_name=browser_name,
allow_downloads=allow_downloads,
skip_news=skip_news,
@@ -97,8 +101,7 @@ async def run_auto_gpt(
# Set up logging module
configure_logging(
debug_mode=debug,
plain_output=config.plain_output,
**config.logging.dict(),
tts_config=config.tts_config,
)
@@ -294,14 +297,16 @@ async def run_auto_gpt(
@coroutine
async def run_auto_gpt_server(
prompt_settings: Optional[Path],
debug: bool,
gpt3only: bool,
gpt4only: bool,
memory_type: str,
browser_name: str,
allow_downloads: bool,
install_plugin_deps: bool,
prompt_settings: Optional[Path] = None,
debug: bool = False,
log_level: Optional[str] = None,
log_format: Optional[str] = None,
log_file_format: Optional[str] = None,
gpt3only: bool = False,
gpt4only: bool = False,
browser_name: Optional[str] = None,
allow_downloads: bool = False,
install_plugin_deps: bool = False,
):
from .agent_protocol_server import AgentProtocolServer
@@ -314,17 +319,18 @@ async def run_auto_gpt_server(
config=config,
prompt_settings_file=prompt_settings,
debug=debug,
log_level=log_level,
log_format=log_format,
log_file_format=log_file_format,
gpt3only=gpt3only,
gpt4only=gpt4only,
memory_type=memory_type,
browser_name=browser_name,
allow_downloads=allow_downloads,
)
# Set up logging module
configure_logging(
debug_mode=debug,
plain_output=config.plain_output,
**config.logging.dict(),
tts_config=config.tts_config,
)
@@ -410,7 +416,9 @@ async def run_interaction_loop(
cycle_budget = cycles_remaining = _get_cycle_budget(
legacy_config.continuous_mode, legacy_config.continuous_limit
)
spinner = Spinner("Thinking...", plain_output=legacy_config.plain_output)
spinner = Spinner(
"Thinking...", plain_output=legacy_config.logging.plain_console_output
)
stop_reason = None
def graceful_agent_interrupt(signum: int, frame: Optional[FrameType]) -> None:

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import contextlib
import logging
import os
import re
from pathlib import Path
@@ -15,6 +16,7 @@ from pydantic import Field, validator
import autogpt
from autogpt.core.configuration.schema import Configurable, SystemSettings
from autogpt.core.resource.model_providers.openai import OPEN_AI_CHAT_MODELS
from autogpt.logs.config import LogFormatName, LoggingConfig
from autogpt.plugins.plugins_config import PluginsConfig
from autogpt.speech import TTSConfig
@@ -40,12 +42,11 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
skip_reprompt: bool = False
authorise_key: str = "y"
exit_key: str = "n"
debug_mode: bool = False
plain_output: bool = False
noninteractive_mode: bool = False
chat_messages_enabled: bool = True
# TTS configuration
tts_config: TTSConfig = TTSConfig()
logging: LoggingConfig = LoggingConfig()
##########################
# Agent Control Settings #
@@ -226,9 +227,16 @@ class ConfigBuilder(Configurable[Config]):
"""Initialize the Config class"""
config_dict = {
"project_root": project_root,
"logging": {
"level": logging.getLevelName(os.getenv("LOG_LEVEL", "INFO")),
"log_format": LogFormatName(os.getenv("LOG_FORMAT", "simple")),
"log_file_format": LogFormatName(
os.getenv("LOG_FILE_FORMAT", os.getenv("LOG_FORMAT", "simple"))
),
"plain_console_output": os.getenv("PLAIN_OUTPUT", "False") == "True",
},
"authorise_key": os.getenv("AUTHORISE_COMMAND_KEY"),
"exit_key": os.getenv("EXIT_KEY"),
"plain_output": os.getenv("PLAIN_OUTPUT", "False") == "True",
"shell_command_control": os.getenv("SHELL_COMMAND_CONTROL"),
"ai_settings_file": project_root
/ Path(os.getenv("AI_SETTINGS_FILE", AI_SETTINGS_FILE)),

View File

@@ -79,10 +79,10 @@ async def interaction_step(
def bootstrap_agent(task, continuous_mode) -> Agent:
config = ConfigBuilder.build_config_from_env()
config.debug_mode = True
config.logging.level = logging.DEBUG
config.logging.plain_console_output = True
config.continuous_mode = continuous_mode
config.temperature = 0
config.plain_output = True
command_registry = CommandRegistry.with_command_modules(COMMAND_CATEGORIES, config)
config.memory_backend = "no_memory"
ai_profile = AIProfile(

View File

@@ -1,21 +1,24 @@
"""Logging module for Auto-GPT."""
from __future__ import annotations
import enum
import logging
import sys
from pathlib import Path
from typing import TYPE_CHECKING, Optional
from auto_gpt_plugin_template import AutoGPTPluginTemplate
from google.cloud.logging_v2.handlers import CloudLoggingFilter
from openai.util import logger as openai_logger
if TYPE_CHECKING:
from autogpt.config import Config
from autogpt.speech import TTSConfig
from autogpt.core.configuration import SystemConfiguration
from autogpt.core.runner.client_lib.logging import BelowLevelFilter
from .formatters import AutoGptFormatter
from .formatters import AutoGptFormatter, StructuredLoggingFormatter
from .handlers import TTSHandler, TypingConsoleHandler
LOG_DIR = Path(__file__).parent.parent.parent / "logs"
@@ -34,80 +37,127 @@ USER_FRIENDLY_OUTPUT_LOGGER = "USER_FRIENDLY_OUTPUT"
_chat_plugins: list[AutoGPTPluginTemplate] = []
class LogFormatName(str, enum.Enum):
SIMPLE = "simple"
DEBUG = "debug"
STRUCTURED = "structured_google_cloud"
TEXT_LOG_FORMAT_MAP = {
LogFormatName.DEBUG: DEBUG_LOG_FORMAT,
LogFormatName.SIMPLE: SIMPLE_LOG_FORMAT,
}
class LoggingConfig(SystemConfiguration):
level: int = logging.INFO
# Console output
log_format: LogFormatName = LogFormatName.SIMPLE
plain_console_output: bool = False
# File output
log_dir: Path = LOG_DIR
log_file_format: Optional[LogFormatName] = LogFormatName.SIMPLE
def configure_logging(
debug_mode: bool = False,
plain_output: bool = False,
tts_config: Optional[TTSConfig] = None,
level: int = logging.INFO,
log_dir: Path = LOG_DIR,
log_format: Optional[LogFormatName] = None,
log_file_format: Optional[LogFormatName] = None,
plain_console_output: bool = False,
tts_config: Optional[TTSConfig] = None,
) -> None:
"""Configure the native logging module."""
"""Configure the native logging module.
Should be usable as `configure_logging(**config.logging.dict())`, where
`config.logging` is a `LoggingConfig` object.
"""
# Auto-adjust default log format based on log level
log_format = log_format or (
LogFormatName.SIMPLE if level != logging.DEBUG else LogFormatName.DEBUG
)
log_file_format = log_file_format or log_format
structured_logging = log_format == LogFormatName.STRUCTURED
if structured_logging:
plain_console_output = True
log_file_format = None
# create log directory if it doesn't exist
if not log_dir.exists():
log_dir.mkdir()
log_level = logging.DEBUG if debug_mode else logging.INFO
log_format = DEBUG_LOG_FORMAT if debug_mode else SIMPLE_LOG_FORMAT
console_formatter = AutoGptFormatter(log_format)
log_handlers: list[logging.Handler] = []
if log_format in (LogFormatName.DEBUG, LogFormatName.SIMPLE):
console_format_template = TEXT_LOG_FORMAT_MAP[log_format]
console_formatter = AutoGptFormatter(console_format_template)
else:
console_formatter = StructuredLoggingFormatter()
console_format_template = SIMPLE_LOG_FORMAT
# Console output handlers
stdout = logging.StreamHandler(stream=sys.stdout)
stdout.setLevel(log_level)
stdout.setLevel(level)
stdout.addFilter(BelowLevelFilter(logging.WARNING))
stdout.setFormatter(console_formatter)
stderr = logging.StreamHandler()
stderr.setLevel(logging.WARNING)
stderr.setFormatter(console_formatter)
# INFO log file handler
activity_log_handler = logging.FileHandler(log_dir / LOG_FILE, "a", "utf-8")
activity_log_handler.setLevel(logging.INFO)
activity_log_handler.setFormatter(
AutoGptFormatter(SIMPLE_LOG_FORMAT, no_color=True)
)
if debug_mode:
# DEBUG log file handler
debug_log_handler = logging.FileHandler(log_dir / DEBUG_LOG_FILE, "a", "utf-8")
debug_log_handler.setLevel(logging.DEBUG)
debug_log_handler.setFormatter(
AutoGptFormatter(DEBUG_LOG_FORMAT, no_color=True)
)
# ERROR log file handler
error_log_handler = logging.FileHandler(log_dir / ERROR_LOG_FILE, "a", "utf-8")
error_log_handler.setLevel(logging.ERROR)
error_log_handler.setFormatter(AutoGptFormatter(DEBUG_LOG_FORMAT, no_color=True))
# Configure the root logger
logging.basicConfig(
format=log_format,
level=log_level,
handlers=(
[stdout, stderr, activity_log_handler, error_log_handler]
+ ([debug_log_handler] if debug_mode else [])
),
)
## Set up user-friendly loggers
log_handlers += [stdout, stderr]
# Console output handler which simulates typing
typing_console_handler = TypingConsoleHandler(stream=sys.stdout)
typing_console_handler.setLevel(logging.INFO)
typing_console_handler.setFormatter(console_formatter)
# User friendly output logger (text + speech)
user_friendly_output_logger = logging.getLogger(USER_FRIENDLY_OUTPUT_LOGGER)
user_friendly_output_logger.setLevel(logging.INFO)
user_friendly_output_logger.addHandler(
typing_console_handler if not plain_output else stdout
typing_console_handler if not plain_console_output else stdout
)
if tts_config:
user_friendly_output_logger.addHandler(TTSHandler(tts_config))
user_friendly_output_logger.addHandler(activity_log_handler)
user_friendly_output_logger.addHandler(error_log_handler)
user_friendly_output_logger.addHandler(stderr)
user_friendly_output_logger.propagate = False
# File output handlers
if log_file_format is not None:
if level < logging.ERROR:
file_output_format_template = TEXT_LOG_FORMAT_MAP[log_file_format]
file_output_formatter = AutoGptFormatter(
file_output_format_template, no_color=True
)
# INFO log file handler
activity_log_handler = logging.FileHandler(log_dir / LOG_FILE, "a", "utf-8")
activity_log_handler.setLevel(level)
activity_log_handler.setFormatter(file_output_formatter)
log_handlers += [activity_log_handler]
user_friendly_output_logger.addHandler(activity_log_handler)
# ERROR log file handler
error_log_handler = logging.FileHandler(log_dir / ERROR_LOG_FILE, "a", "utf-8")
error_log_handler.setLevel(logging.ERROR)
error_log_handler.setFormatter(
AutoGptFormatter(DEBUG_LOG_FORMAT, no_color=True)
)
log_handlers += [error_log_handler]
user_friendly_output_logger.addHandler(error_log_handler)
# Configure the root logger
logging.basicConfig(
format=console_format_template,
level=level,
handlers=log_handlers,
)
# Speech output
speech_output_logger = logging.getLogger(SPEECH_OUTPUT_LOGGER)
speech_output_logger.setLevel(logging.INFO)
if tts_config:
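
A minimal usage sketch of the rewritten configure_logging(): selecting the structured format forces plain console output and skips the file handlers, as implemented above.

    import logging
    from autogpt.logs.config import LoggingConfig, LogFormatName, configure_logging

    logging_config = LoggingConfig(
        level=logging.INFO,
        log_format=LogFormatName.STRUCTURED,  # JSON console output for Google Cloud Logging
    )
    # Same shape as configure_logging(**config.logging.dict()) on a full Config object;
    # with the structured format, log_file_format is discarded and no log files are written.
    configure_logging(**logging_config.dict())
    logging.getLogger(__name__).info("hello, structured logs")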

View File

@@ -1,6 +1,7 @@
import logging
from colorama import Style
from google.cloud.logging_v2.handlers import CloudLoggingFilter, StructuredLogHandler
from autogpt.core.runner.client_lib.logging import FancyConsoleFormatter
@@ -37,3 +38,16 @@ class AutoGptFormatter(FancyConsoleFormatter):
return remove_color_codes(super().format(record))
else:
return super().format(record)
class StructuredLoggingFormatter(StructuredLogHandler, logging.Formatter):
def __init__(self):
# Set up CloudLoggingFilter to add diagnostic info to the log records
self.cloud_logging_filter = CloudLoggingFilter()
# Init StructuredLogHandler
super().__init__()
def format(self, record: logging.LogRecord) -> str:
self.cloud_logging_filter.filter(record)
return super().format(record)
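
A brief sketch of attaching the formatter above to a handler by hand; this mirrors what configure_logging() does for console output when the structured format is selected (the module path is assumed from the import in logs/config.py):

    import logging
    import sys
    from autogpt.logs.formatters import StructuredLoggingFormatter  # module path assumed

    handler = logging.StreamHandler(stream=sys.stdout)
    handler.setFormatter(StructuredLoggingFormatter())  # emits Cloud Logging-compatible JSON lines
    logger = logging.getLogger("structured-demo")
    logger.addHandler(handler)
    logger.warning("structured output example")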

View File

@@ -1034,7 +1034,6 @@ files = [
{file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a64814ae7bce73925131381603fff0116e2df25230dfc80d6d690aa6e20b37"},
{file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c81f22b4f572f8a2110b0b741bb64e5a6427e0a198b2cdc1fbaf85f352a3aa"},
{file = "contourpy-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53cc3a40635abedbec7f1bde60f8c189c49e84ac180c665f2cd7c162cc454baa"},
{file = "contourpy-1.1.0-cp310-cp310-win32.whl", hash = "sha256:9b2dd2ca3ac561aceef4c7c13ba654aaa404cf885b187427760d7f7d4c57cff8"},
{file = "contourpy-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:1f795597073b09d631782e7245016a4323cf1cf0b4e06eef7ea6627e06a37ff2"},
{file = "contourpy-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b7b04ed0961647691cfe5d82115dd072af7ce8846d31a5fac6c142dcce8b882"},
{file = "contourpy-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27bc79200c742f9746d7dd51a734ee326a292d77e7d94c8af6e08d1e6c15d545"},
@@ -1043,7 +1042,6 @@ files = [
{file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5cec36c5090e75a9ac9dbd0ff4a8cf7cecd60f1b6dc23a374c7d980a1cd710e"},
{file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cbd657e9bde94cd0e33aa7df94fb73c1ab7799378d3b3f902eb8eb2e04a3a"},
{file = "contourpy-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:181cbace49874f4358e2929aaf7ba84006acb76694102e88dd15af861996c16e"},
{file = "contourpy-1.1.0-cp311-cp311-win32.whl", hash = "sha256:edb989d31065b1acef3828a3688f88b2abb799a7db891c9e282df5ec7e46221b"},
{file = "contourpy-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fb3b7d9e6243bfa1efb93ccfe64ec610d85cfe5aec2c25f97fbbd2e58b531256"},
{file = "contourpy-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcb41692aa09aeb19c7c213411854402f29f6613845ad2453d30bf421fe68fed"},
{file = "contourpy-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d123a5bc63cd34c27ff9c7ac1cd978909e9c71da12e05be0231c608048bb2ae"},
@@ -1052,7 +1050,6 @@ files = [
{file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317267d915490d1e84577924bd61ba71bf8681a30e0d6c545f577363157e5e94"},
{file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d551f3a442655f3dcc1285723f9acd646ca5858834efeab4598d706206b09c9f"},
{file = "contourpy-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7a117ce7df5a938fe035cad481b0189049e8d92433b4b33aa7fc609344aafa1"},
{file = "contourpy-1.1.0-cp38-cp38-win32.whl", hash = "sha256:108dfb5b3e731046a96c60bdc46a1a0ebee0760418951abecbe0fc07b5b93b27"},
{file = "contourpy-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4f26b25b4f86087e7d75e63212756c38546e70f2a92d2be44f80114826e1cd4"},
{file = "contourpy-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc00bb4225d57bff7ebb634646c0ee2a1298402ec10a5fe7af79df9a51c1bfd9"},
{file = "contourpy-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:189ceb1525eb0655ab8487a9a9c41f42a73ba52d6789754788d1883fb06b2d8a"},
@@ -1061,7 +1058,6 @@ files = [
{file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:143dde50520a9f90e4a2703f367cf8ec96a73042b72e68fcd184e1279962eb6f"},
{file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e94bef2580e25b5fdb183bf98a2faa2adc5b638736b2c0a4da98691da641316a"},
{file = "contourpy-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ed614aea8462735e7d70141374bd7650afd1c3f3cb0c2dbbcbe44e14331bf002"},
{file = "contourpy-1.1.0-cp39-cp39-win32.whl", hash = "sha256:71551f9520f008b2950bef5f16b0e3587506ef4f23c734b71ffb7b89f8721999"},
{file = "contourpy-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:438ba416d02f82b692e371858143970ed2eb6337d9cdbbede0d8ad9f3d7dd17d"},
{file = "contourpy-1.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a698c6a7a432789e587168573a864a7ea374c6be8d4f31f9d87c001d5a843493"},
{file = "contourpy-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b0ac8a12880412da3551a8cb5a187d3298a72802b45a3bd1805e204ad8439"},
@@ -1728,6 +1724,14 @@ files = [
[package.dependencies]
google-auth = ">=2.14.1,<3.0.dev0"
googleapis-common-protos = ">=1.56.2,<2.0.dev0"
grpcio = [
{version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
{version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
]
grpcio-status = [
{version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
{version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
requests = ">=2.18.0,<3.0.0.dev0"
@@ -1792,6 +1796,81 @@ files = [
google-auth = "*"
httplib2 = ">=0.19.0"
[[package]]
name = "google-cloud-appengine-logging"
version = "1.3.2"
description = "Google Cloud Appengine Logging API client library"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-cloud-appengine-logging-1.3.2.tar.gz", hash = "sha256:a2989fca0e88463b56432aa821e64b81c3d171ee37b84771189b48e8b97cd496"},
{file = "google_cloud_appengine_logging-1.3.2-py2.py3-none-any.whl", hash = "sha256:6ac6261567b56611f6891fa650f76db8a48d528762e5c2a09230b41d82ee2be0"},
]
[package.dependencies]
google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
proto-plus = [
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
{version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
[[package]]
name = "google-cloud-audit-log"
version = "0.2.5"
description = "Google Cloud Audit Protos"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-cloud-audit-log-0.2.5.tar.gz", hash = "sha256:86e2faba3383adc8fd04a5bd7fd4f960b3e4aedaa7ed950f2f891ce16902eb6b"},
{file = "google_cloud_audit_log-0.2.5-py2.py3-none-any.whl", hash = "sha256:18b94d4579002a450b7902cd2e8b8fdcb1ea2dd4df3b41f8f82be6d9f7fcd746"},
]
[package.dependencies]
googleapis-common-protos = ">=1.56.2,<2.0dev"
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
[[package]]
name = "google-cloud-core"
version = "2.3.3"
description = "Google Cloud API client core library"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"},
{file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"},
]
[package.dependencies]
google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev"
google-auth = ">=1.25.0,<3.0dev"
[package.extras]
grpc = ["grpcio (>=1.38.0,<2.0dev)"]
[[package]]
name = "google-cloud-logging"
version = "3.8.0"
description = "Stackdriver Logging API client library"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-cloud-logging-3.8.0.tar.gz", hash = "sha256:fdd916e59a84aa8c02e8148d7fdd3b3b623c57b0c1ff71f43297ce8e50fc1eab"},
{file = "google_cloud_logging-3.8.0-py2.py3-none-any.whl", hash = "sha256:c868b276b021cf5f32b6e8356b6cb3666357d149ad0fd798076043a5ec7ed988"},
]
[package.dependencies]
google-api-core = {version = ">=1.33.2,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]}
google-cloud-appengine-logging = ">=0.1.0,<2.0.0dev"
google-cloud-audit-log = ">=0.1.0,<1.0.0dev"
google-cloud-core = ">=2.0.0,<3.0.0dev"
grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
proto-plus = [
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
{version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
[[package]]
name = "googleapis-common-protos"
version = "1.61.0"
@@ -1804,6 +1883,7 @@ files = [
]
[package.dependencies]
grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""}
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
[package.extras]
@@ -1835,7 +1915,7 @@ files = [
{file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"},
{file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"},
{file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"},
{file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"},
{file = "greenlet-3.0.0-cp311-universal2-macosx_10_9_universal2.whl", hash = "sha256:c3692ecf3fe754c8c0f2c95ff19626584459eab110eaab66413b1e7425cd84e9"},
{file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"},
{file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"},
{file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"},
@@ -1845,6 +1925,7 @@ files = [
{file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"},
{file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"},
{file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"},
{file = "greenlet-3.0.0-cp312-universal2-macosx_10_9_universal2.whl", hash = "sha256:553d6fb2324e7f4f0899e5ad2c427a4579ed4873f42124beba763f16032959af"},
{file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"},
{file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"},
{file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"},
@@ -1884,6 +1965,22 @@ files = [
docs = ["Sphinx"]
test = ["objgraph", "psutil"]
[[package]]
name = "grpc-google-iam-v1"
version = "0.12.7"
description = "IAM API client library"
optional = false
python-versions = ">=3.7"
files = [
{file = "grpc-google-iam-v1-0.12.7.tar.gz", hash = "sha256:009197a7f1eaaa22149c96e5e054ac5934ba7241974e92663d8d3528a21203d1"},
{file = "grpc_google_iam_v1-0.12.7-py2.py3-none-any.whl", hash = "sha256:834da89f4c4a2abbe842a793ed20fc6d9a77011ef2626755b1b89116fb9596d7"},
]
[package.dependencies]
googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]}
grpcio = ">=1.44.0,<2.0.0dev"
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
[[package]]
name = "grpcio"
version = "1.59.0"
@@ -1950,6 +2047,22 @@ files = [
[package.extras]
protobuf = ["grpcio-tools (>=1.59.0)"]
[[package]]
name = "grpcio-status"
version = "1.59.0"
description = "Status proto mapping for gRPC"
optional = false
python-versions = ">=3.6"
files = [
{file = "grpcio-status-1.59.0.tar.gz", hash = "sha256:f93b9c33e0a26162ef8431bfcffcc3e1fb217ccd8d7b5b3061b6e9f813e698b5"},
{file = "grpcio_status-1.59.0-py3-none-any.whl", hash = "sha256:cb5a222b14a80ee050bff9676623822e953bff0c50d2d29180de723652fdf10d"},
]
[package.dependencies]
googleapis-common-protos = ">=1.5.5"
grpcio = ">=1.59.0"
protobuf = ">=4.21.6"
[[package]]
name = "gtts"
version = "2.4.0"
@@ -3433,8 +3546,8 @@ files = [
[package.dependencies]
numpy = [
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
{version = ">=1.23.2", markers = "python_version == \"3.11\""},
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
]
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
@@ -3781,6 +3894,23 @@ files = [
[package.dependencies]
wcwidth = "*"
[[package]]
name = "proto-plus"
version = "1.22.3"
description = "Beautiful, Pythonic protocol buffers."
optional = false
python-versions = ">=3.6"
files = [
{file = "proto-plus-1.22.3.tar.gz", hash = "sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b"},
{file = "proto_plus-1.22.3-py3-none-any.whl", hash = "sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df"},
]
[package.dependencies]
protobuf = ">=3.19.0,<5.0.0dev"
[package.extras]
testing = ["google-api-core[grpc] (>=1.31.5)"]
[[package]]
name = "protobuf"
version = "4.24.4"
@@ -6216,4 +6346,4 @@ benchmark = ["agbenchmark"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "5dbc7f12dea7604c66f7fa35187af4bc69e09670177c25ca74f3216bc9f8d23d"
content-hash = "af86e7b78bb4bb258df7e648fc99e5faed41bf320c4846831986aaff4eaa39ea"

View File

@@ -67,6 +67,7 @@ openapi-python-client = "^0.14.0"
# Benchmarking
# agbenchmark = { path = "../../benchmark", optional = true }
agbenchmark = {git = "https://github.com/Significant-Gravitas/AutoGPT.git", subdirectory = "benchmark", optional = true}
google-cloud-logging = "^3.8.0"
[tool.poetry.extras]
benchmark = ["agbenchmark"]

View File

@@ -13,7 +13,6 @@ from autogpt.core.resource.model_providers import ChatModelProvider, OpenAIProvi
from autogpt.file_workspace import FileWorkspace
from autogpt.llm.api_manager import ApiManager
from autogpt.logs.config import configure_logging
from autogpt.memory.vector import get_memory
from autogpt.models.command_registry import CommandRegistry
pytest_plugins = [
@@ -77,8 +76,9 @@ def config(
config.plugins_dir = "tests/unit/data/test_plugins"
config.plugins_config_file = temp_plugins_config_file
config.logging.log_dir = Path(__file__).parent / "logs"
config.logging.plain_console_output = True
config.noninteractive_mode = True
config.plain_output = True
# avoid circular dependency
from autogpt.plugins.plugins_config import PluginsConfig
@@ -93,11 +93,7 @@ def config(
@pytest.fixture(scope="session")
def setup_logger(config: Config):
configure_logging(
debug_mode=config.debug_mode,
plain_output=config.plain_output,
log_dir=Path(__file__).parent / "logs",
)
configure_logging(**config.logging.dict())
@pytest.fixture()

View File

@@ -18,7 +18,6 @@ def test_initial_values(config: Config) -> None:
"""
Test if the initial values of the config class attributes are set correctly.
"""
assert config.debug_mode is False
assert config.continuous_mode is False
assert config.tts_config.speak_mode is False
assert config.fast_llm == "gpt-3.5-turbo-16k"
@@ -81,20 +80,6 @@ def test_set_smart_llm(config: Config) -> None:
config.smart_llm = smart_llm
def test_set_debug_mode(config: Config) -> None:
"""
Test if the set_debug_mode() method updates the debug_mode attribute.
"""
# Store debug mode to reset it after the test
debug_mode = config.debug_mode
config.debug_mode = True
assert config.debug_mode is True
# Reset debug mode
config.debug_mode = debug_mode
@patch("openai.Model.list")
def test_smart_and_fast_llms_set_to_gpt4(mock_list_models: Any, config: Config) -> None:
"""