Fix path processing (#5032)

* Fix and clean up path processing in logs module

* Fix path processing throughout the project

* Fix plugins test

* Fix borky pytest vs mkdir(exist_ok=True)

* Update docs and gitignore for new workspace location

* Fix borky pytest vol.2

* ok james
Reinier van der Leer authored on 2023-07-21 20:36:15 +02:00, committed by GitHub
parent e0d8e6b75f
commit 2c53530e99
21 changed files with 95 additions and 118 deletions

.gitignore
View File

@@ -1,7 +1,7 @@
 ## Original ignores
 autogpt/keys.py
 autogpt/*.json
-**/auto_gpt_workspace/*
+auto_gpt_workspace/*
 *.mpeg
 .env
 azure.yaml

View File

@@ -1,4 +1,5 @@
 """Main script for the autogpt package."""
+from pathlib import Path
 from typing import Optional
 
 import click
@@ -115,24 +116,25 @@ def main(
     if ctx.invoked_subcommand is None:
         run_auto_gpt(
-            continuous,
-            continuous_limit,
-            ai_settings,
-            prompt_settings,
-            skip_reprompt,
-            speak,
-            debug,
-            gpt3only,
-            gpt4only,
-            memory_type,
-            browser_name,
-            allow_downloads,
-            skip_news,
-            workspace_directory,
-            install_plugin_deps,
-            ai_name,
-            ai_role,
-            ai_goal,
+            continuous=continuous,
+            continuous_limit=continuous_limit,
+            ai_settings=ai_settings,
+            prompt_settings=prompt_settings,
+            skip_reprompt=skip_reprompt,
+            speak=speak,
+            debug=debug,
+            gpt3only=gpt3only,
+            gpt4only=gpt4only,
+            memory_type=memory_type,
+            browser_name=browser_name,
+            allow_downloads=allow_downloads,
+            skip_news=skip_news,
+            working_directory=Path(__file__).parent.parent,  # TODO: make this an option
+            workspace_directory=workspace_directory,
+            install_plugin_deps=install_plugin_deps,
+            ai_name=ai_name,
+            ai_role=ai_role,
+            ai_goals=ai_goal,
         )
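Note on the hunk above: `run_auto_gpt` is now called with keyword arguments, and the new `working_directory` is derived from the CLI module's own location rather than the process's current directory. A minimal standalone sketch of that anchoring idiom (the derived names are illustrative, not the project's actual layout):

```python
from pathlib import Path

# Anchor paths to this module's location instead of os.getcwd().
# `__file__` here stands in for autogpt/cli.py.
module_file = Path(__file__).resolve()
project_root = module_file.parent.parent  # two levels up from the module

print(project_root)
print(project_root / "ai_settings.yaml")  # e.g. a settings file at the repo root
```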

View File

@@ -150,7 +150,7 @@ def execute_python_file(filename: str, agent: Agent) -> str:
                 file_path.relative_to(agent.workspace.root).as_posix(),
             ],
             volumes={
-                agent.config.workspace_path: {
+                str(agent.config.workspace_path): {
                     "bind": "/workspace",
                     "mode": "rw",
                 }
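The host-path key in `volumes` is wrapped in `str()` because `workspace_path` is now a `Path`, and the Docker SDK expects plain string paths in that mapping. A hedged sketch of the call shape with the `docker` Python package (requires a running Docker daemon; the image and paths are illustrative):

```python
from pathlib import Path

import docker  # Docker SDK for Python (pip install docker)

workspace_path = Path("/tmp/auto_gpt_workspace")  # illustrative host directory
workspace_path.mkdir(parents=True, exist_ok=True)

client = docker.from_env()
# The volumes mapping is keyed by host path; the Path object is converted
# explicitly to a string before being handed to the SDK.
logs = client.containers.run(
    "python:3-alpine",
    ["python", "-c", "print('hello from the container')"],
    volumes={str(workspace_path): {"bind": "/workspace", "mode": "rw"}},
    working_dir="/workspace",
    remove=True,
)
print(logs.decode())
```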

View File

@@ -37,7 +37,7 @@ def generate_image(prompt: str, agent: Agent, size: int = 256) -> str:
     Returns:
         str: The filename of the image
     """
-    filename = f"{agent.config.workspace_path}/{str(uuid.uuid4())}.jpg"
+    filename = agent.config.workspace_path / f"{str(uuid.uuid4())}.jpg"
 
     # DALL-E
     if agent.config.image_provider == "dalle":
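Building the filename with the `/` operator keeps the value a `Path` instead of collapsing it to a string early. A tiny illustration of the difference:

```python
import uuid
from pathlib import Path

workspace_path = Path("auto_gpt_workspace")  # illustrative

as_str = f"{workspace_path}/{uuid.uuid4()}.jpg"   # plain str
as_path = workspace_path / f"{uuid.uuid4()}.jpg"  # still a Path

print(type(as_str).__name__, as_str)
print(type(as_path).__name__, as_path, as_path.suffix)
```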

View File

@@ -4,7 +4,6 @@ A module that contains the AIConfig class object that contains the configuration
 """
 from __future__ import annotations
 
-import os
 import platform
 from pathlib import Path
 from typing import TYPE_CHECKING, Optional
@@ -16,9 +15,6 @@ if TYPE_CHECKING:
     from autogpt.models.command_registry import CommandRegistry
     from autogpt.prompts.generator import PromptGenerator
 
-# Soon this will go in a folder where it remembers more stuff about the run(s)
-SAVE_FILE = str(Path(os.getcwd()) / "ai_settings.yaml")
-
 
 class AIConfig:
     """
@@ -57,14 +53,13 @@ class AIConfig:
         self.command_registry: CommandRegistry | None = None
 
     @staticmethod
-    def load(ai_settings_file: str = SAVE_FILE) -> "AIConfig":
+    def load(ai_settings_file: str | Path) -> "AIConfig":
         """
         Returns class object with parameters (ai_name, ai_role, ai_goals, api_budget)
         loaded from yaml file if yaml file exists, else returns class with no parameters.
 
         Parameters:
-            ai_settings_file (int): The path to the config yaml file.
-            DEFAULT: "../ai_settings.yaml"
+            ai_settings_file (Path): The path to the config yaml file.
 
         Returns:
             cls (object): An instance of given cls object
@@ -85,16 +80,15 @@ class AIConfig:
             for goal in config_params.get("ai_goals", [])
         ]
         api_budget = config_params.get("api_budget", 0.0)
-        # type: Type[AIConfig]
         return AIConfig(ai_name, ai_role, ai_goals, api_budget)
 
-    def save(self, ai_settings_file: str = SAVE_FILE) -> None:
+    def save(self, ai_settings_file: str | Path) -> None:
         """
         Saves the class parameters to the specified file yaml file path as a yaml file.
 
         Parameters:
-            ai_settings_file(str): The path to the config yaml file.
-            DEFAULT: "../ai_settings.yaml"
+            ai_settings_file (Path): The path to the config yaml file.
 
         Returns:
             None
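With the module-level `SAVE_FILE` default gone, callers have to pass the settings path explicitly; elsewhere in this commit that path is built as `config.workdir / config.ai_settings_file`. A rough sketch of the explicit-path load/save pattern, using a stand-in class and PyYAML rather than the real `AIConfig`:

```python
from __future__ import annotations

from pathlib import Path

import yaml  # PyYAML


class SettingsStub:
    """Stand-in for a settings class that round-trips through a YAML file."""

    def __init__(self, name: str = "", goals: list[str] | None = None):
        self.name = name
        self.goals = goals or []

    @staticmethod
    def load(settings_file: str | Path) -> SettingsStub:
        # Explicit path argument; no module-level default built from os.getcwd().
        settings_file = Path(settings_file)
        if not settings_file.is_file():
            return SettingsStub()
        params = yaml.safe_load(settings_file.read_text(encoding="utf-8")) or {}
        return SettingsStub(params.get("name", ""), params.get("goals", []))

    def save(self, settings_file: str | Path) -> None:
        Path(settings_file).write_text(
            yaml.dump({"name": self.name, "goals": self.goals}), encoding="utf-8"
        )


workdir = Path(".")  # illustrative stand-in for config.workdir
SettingsStub("Demo", ["do something useful"]).save(workdir / "ai_settings.yaml")
print(SettingsStub.load(workdir / "ai_settings.yaml").name)
```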

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
 import contextlib
 import os
 import re
+from pathlib import Path
 from typing import Any, Dict, Optional, Union
 
 import yaml
@@ -14,10 +15,8 @@ from pydantic import Field, validator
 from autogpt.core.configuration.schema import Configurable, SystemSettings
 from autogpt.plugins.plugins_config import PluginsConfig
 
-AZURE_CONFIG_FILE = os.path.join(os.path.dirname(__file__), "../..", "azure.yaml")
-PLUGINS_CONFIG_FILE = os.path.join(
-    os.path.dirname(__file__), "../..", "plugins_config.yaml"
-)
+AZURE_CONFIG_FILE = "azure.yaml"
+PLUGINS_CONFIG_FILE = "plugins_config.yaml"
 
 GPT_4_MODEL = "gpt-4"
 GPT_3_MODEL = "gpt-3.5-turbo"
@@ -47,7 +46,8 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     # Paths
     ai_settings_file: str = "ai_settings.yaml"
     prompt_settings_file: str = "prompt_settings.yaml"
-    workspace_path: Optional[str] = None
+    workdir: Path = None
+    workspace_path: Optional[Path] = None
     file_logger_path: Optional[str] = None
     # Model configuration
     fast_llm: str = "gpt-3.5-turbo"
@@ -210,9 +210,10 @@ class ConfigBuilder(Configurable[Config]):
     default_settings = Config()
 
     @classmethod
-    def build_config_from_env(cls) -> Config:
+    def build_config_from_env(cls, workdir: Path) -> Config:
         """Initialize the Config class"""
         config_dict = {
+            "workdir": workdir,
             "authorise_key": os.getenv("AUTHORISE_COMMAND_KEY"),
             "exit_key": os.getenv("EXIT_KEY"),
             "plain_output": os.getenv("PLAIN_OUTPUT", "False") == "True",
@@ -299,7 +300,9 @@ class ConfigBuilder(Configurable[Config]):
             config_dict["temperature"] = float(os.getenv("TEMPERATURE"))
 
         if config_dict["use_azure"]:
-            azure_config = cls.load_azure_config(config_dict["azure_config_file"])
+            azure_config = cls.load_azure_config(
+                workdir / config_dict["azure_config_file"]
+            )
             config_dict.update(azure_config)
 
         elif os.getenv("OPENAI_API_BASE_URL"):
@@ -318,7 +321,7 @@ class ConfigBuilder(Configurable[Config]):
         # Set secondary config variables (that depend on other config variables)
         config.plugins_config = PluginsConfig.load_config(
-            config.plugins_config_file,
+            config.workdir / config.plugins_config_file,
             config.plugins_denylist,
             config.plugins_allowlist,
         )
@@ -326,13 +329,13 @@ class ConfigBuilder(Configurable[Config]):
         return config
 
     @classmethod
-    def load_azure_config(cls, config_file: str = AZURE_CONFIG_FILE) -> Dict[str, str]:
+    def load_azure_config(cls, config_file: Path) -> Dict[str, str]:
         """
         Loads the configuration parameters for Azure hosting from the specified file
         path as a yaml file.
 
         Parameters:
-            config_file(str): The path to the config yaml file. DEFAULT: "../azure.yaml"
+            config_file (Path): The path to the config yaml file.
 
         Returns:
             Dict
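After this change `AZURE_CONFIG_FILE` and `PLUGINS_CONFIG_FILE` are bare file names, joined against the new `workdir` field when they are actually read. A small sketch of that bare-name-plus-workdir convention (the function and directory are illustrative):

```python
from pathlib import Path

# Bare file names, mirroring the constants after this change.
AZURE_CONFIG_FILE = "azure.yaml"
PLUGINS_CONFIG_FILE = "plugins_config.yaml"


def resolve_config_files(workdir: Path) -> dict[str, Path]:
    # Relative names are anchored to an explicit working directory rather than
    # to the module's own location via os.path.dirname(__file__).
    return {
        "azure": workdir / AZURE_CONFIG_FILE,
        "plugins": workdir / PLUGINS_CONFIG_FILE,
    }


for name, path in resolve_config_files(Path("/opt/Auto-GPT")).items():
    print(name, path)
```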

View File

@@ -2,6 +2,7 @@ import json
 import logging
 import random
 import time
+from pathlib import Path
 
 
 class ConsoleHandler(logging.StreamHandler):
@@ -38,7 +39,7 @@ class TypingConsoleHandler(logging.StreamHandler):
 
 class JsonFileHandler(logging.FileHandler):
-    def __init__(self, filename: str, mode="a", encoding=None, delay=False):
+    def __init__(self, filename: str | Path, mode="a", encoding=None, delay=False):
         super().__init__(filename, mode, encoding, delay)
 
     def emit(self, record: logging.LogRecord):
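`JsonFileHandler` subclasses `logging.FileHandler`, which has accepted path-like filenames since Python 3.6, so widening the annotation to `str | Path` changes nothing at runtime. A quick check:

```python
import logging
from pathlib import Path

log_path = Path("example.json.log")  # illustrative file name

handler = logging.FileHandler(log_path, mode="a", encoding="utf-8")
demo_logger = logging.getLogger("path-handler-demo")
demo_logger.addHandler(handler)
demo_logger.warning("FileHandler accepts Path objects directly")
handler.close()
```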

View File

@@ -1,5 +1,6 @@
 import json
 import os
+from pathlib import Path
 from typing import Any, Dict, Union
 
 from .logger import logger
@@ -23,38 +24,33 @@ class LogCycleHandler:
     def __init__(self):
         self.log_count_within_cycle = 0
 
-    @staticmethod
-    def create_directory_if_not_exists(directory_path: str) -> None:
-        if not os.path.exists(directory_path):
-            os.makedirs(directory_path, exist_ok=True)
-
-    def create_outer_directory(self, ai_name: str, created_at: str) -> str:
-        log_directory = logger.get_log_directory()
-
+    def create_outer_directory(self, ai_name: str, created_at: str) -> Path:
         if os.environ.get("OVERWRITE_DEBUG") == "1":
             outer_folder_name = "auto_gpt"
         else:
             ai_name_short = self.get_agent_short_name(ai_name)
             outer_folder_name = f"{created_at}_{ai_name_short}"
 
-        outer_folder_path = os.path.join(log_directory, "DEBUG", outer_folder_name)
-        self.create_directory_if_not_exists(outer_folder_path)
+        outer_folder_path = logger.log_dir / "DEBUG" / outer_folder_name
+        if not outer_folder_path.exists():
+            outer_folder_path.mkdir(parents=True)
 
         return outer_folder_path
 
     def get_agent_short_name(self, ai_name: str) -> str:
         return ai_name[:15].rstrip() if ai_name else DEFAULT_PREFIX
 
-    def create_inner_directory(self, outer_folder_path: str, cycle_count: int) -> str:
+    def create_inner_directory(self, outer_folder_path: Path, cycle_count: int) -> Path:
         nested_folder_name = str(cycle_count).zfill(3)
-        nested_folder_path = os.path.join(outer_folder_path, nested_folder_name)
-        self.create_directory_if_not_exists(nested_folder_path)
+        nested_folder_path = outer_folder_path / nested_folder_name
+        if not nested_folder_path.exists():
+            nested_folder_path.mkdir()
 
         return nested_folder_path
 
     def create_nested_directory(
         self, ai_name: str, created_at: str, cycle_count: int
-    ) -> str:
+    ) -> Path:
         outer_folder_path = self.create_outer_directory(ai_name, created_at)
         nested_folder_path = self.create_inner_directory(outer_folder_path, cycle_count)
@@ -75,14 +71,10 @@ class LogCycleHandler:
             data (Any): The data to be logged.
             file_name (str): The name of the file to save the logged data.
         """
-        nested_folder_path = self.create_nested_directory(
-            ai_name, created_at, cycle_count
-        )
+        cycle_log_dir = self.create_nested_directory(ai_name, created_at, cycle_count)
 
         json_data = json.dumps(data, ensure_ascii=False, indent=4)
-        log_file_path = os.path.join(
-            nested_folder_path, f"{self.log_count_within_cycle}_{file_name}"
-        )
+        log_file_path = cycle_log_dir / f"{self.log_count_within_cycle}_{file_name}"
 
         logger.log_json(json_data, log_file_path)
         self.log_count_within_cycle += 1
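The directory helpers now guard with `.exists()` before calling `.mkdir(parents=True)`; the commit message's `mkdir(exist_ok=True)` note refers to the alternative one-liner. Both pathlib idioms side by side (paths are illustrative):

```python
from pathlib import Path

cycle_dir = Path("logs") / "DEBUG" / "2023-07-21_demo" / "001"  # illustrative

# Idiom used above: check first, then create any missing parents.
if not cycle_dir.exists():
    cycle_dir.mkdir(parents=True)

# Equivalent one-liner: exist_ok=True makes repeated calls a no-op.
cycle_dir.mkdir(parents=True, exist_ok=True)

print(cycle_dir.is_dir())
```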

View File

@@ -2,7 +2,7 @@
 from __future__ import annotations
 
 import logging
-import os
+from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional
 
 from colorama import Fore
@@ -25,10 +25,10 @@ class Logger(metaclass=Singleton):
     def __init__(self):
         # create log directory if it doesn't exist
-        this_files_dir_path = os.path.dirname(__file__)
-        log_dir = os.path.join(this_files_dir_path, "../logs")
-        if not os.path.exists(log_dir):
-            os.makedirs(log_dir)
+        # TODO: use workdir from config
+        self.log_dir = Path(__file__).parent.parent.parent / "logs"
+        if not self.log_dir.exists():
+            self.log_dir.mkdir()
 
         log_file = "activity.log"
         error_file = "error.log"
@@ -46,9 +46,7 @@ class Logger(metaclass=Singleton):
         self.console_handler.setFormatter(console_formatter)
 
         # Info handler in activity.log
-        self.file_handler = logging.FileHandler(
-            os.path.join(log_dir, log_file), "a", "utf-8"
-        )
+        self.file_handler = logging.FileHandler(self.log_dir / log_file, "a", "utf-8")
         self.file_handler.setLevel(logging.DEBUG)
         info_formatter = AutoGptFormatter(
             "%(asctime)s %(levelname)s %(title)s %(message_no_color)s"
@@ -56,9 +54,7 @@ class Logger(metaclass=Singleton):
         self.file_handler.setFormatter(info_formatter)
 
         # Error handler error.log
-        error_handler = logging.FileHandler(
-            os.path.join(log_dir, error_file), "a", "utf-8"
-        )
+        error_handler = logging.FileHandler(self.log_dir / error_file, "a", "utf-8")
         error_handler.setLevel(logging.ERROR)
         error_formatter = AutoGptFormatter(
             "%(asctime)s %(levelname)s %(module)s:%(funcName)s:%(lineno)d %(title)s"
@@ -179,13 +175,9 @@ class Logger(metaclass=Singleton):
         self.typewriter_log("DOUBLE CHECK CONFIGURATION", Fore.YELLOW, additionalText)
 
-    def log_json(self, data: Any, file_name: str) -> None:
-        # Define log directory
-        this_files_dir_path = os.path.dirname(__file__)
-        log_dir = os.path.join(this_files_dir_path, "../logs")
-
+    def log_json(self, data: Any, file_name: str | Path) -> None:
         # Create a handler for JSON files
-        json_file_path = os.path.join(log_dir, file_name)
+        json_file_path = self.log_dir / file_name
         json_data_handler = JsonFileHandler(json_file_path)
         json_data_handler.setFormatter(JsonFormatter())
@@ -194,10 +186,5 @@ class Logger(metaclass=Singleton):
         self.json_logger.debug(data)
         self.json_logger.removeHandler(json_data_handler)
 
-    def get_log_directory(self) -> str:
-        this_files_dir_path = os.path.dirname(__file__)
-        log_dir = os.path.join(this_files_dir_path, "../../logs")
-        return os.path.abspath(log_dir)
-
 
 logger = Logger()

View File

@@ -53,6 +53,7 @@ def run_auto_gpt(
     browser_name: str,
     allow_downloads: bool,
     skip_news: bool,
+    working_directory: Path,
     workspace_directory: str | Path,
     install_plugin_deps: bool,
     ai_name: Optional[str] = None,
@@ -62,7 +63,8 @@ def run_auto_gpt(
     # Configure logging before we do anything else.
     logger.set_level(logging.DEBUG if debug else logging.INFO)
 
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workdir=working_directory)
+
     # HACK: This is a hack to allow the config into the logger without having to pass it around everywhere
     # or import it directly.
     logger.config = config
@@ -129,10 +131,10 @@ def run_auto_gpt(
     # TODO: have this directory live outside the repository (e.g. in a user's
     #   home directory) and have it come in as a command line argument or part of
     #   the env file.
-    workspace_directory = Workspace.get_workspace_directory(config, workspace_directory)
+    Workspace.set_workspace_directory(config, workspace_directory)
 
     # HACK: doing this here to collect some globals that depend on the workspace.
-    Workspace.build_file_logger_path(config, workspace_directory)
+    Workspace.build_file_logger_path(config, config.workspace_path)
 
     config.plugins = scan_plugins(config, config.debug_mode)
     # Create a CommandRegistry instance and scan default folder
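The startup order is now: build the config from the environment with an explicit `workdir`, let `Workspace.set_workspace_directory` populate `config.workspace_path`, then derive the file logger path from it. A condensed sketch of that sequence using the names from this diff (runnable only inside the repository; the working directory value is illustrative and error handling is omitted):

```python
from pathlib import Path

from autogpt.config import ConfigBuilder
from autogpt.workspace import Workspace

# Condensed from the diff above; the rest of run_auto_gpt is omitted.
working_directory = Path(__file__).parent.parent  # wherever the caller anchors it

config = ConfigBuilder.build_config_from_env(workdir=working_directory)
Workspace.set_workspace_directory(config, workspace_directory=None)
Workspace.build_file_logger_path(config, config.workspace_path)

print(config.workspace_path)
print(config.file_logger_path)
```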

View File

@@ -29,8 +29,7 @@ class JSONFileMemory(VectorMemoryProvider):
         Returns:
             None
         """
-        workspace_path = Path(config.workspace_path)
-        self.file_path = workspace_path / f"{config.memory_index}.json"
+        self.file_path = config.workspace_path / f"{config.memory_index}.json"
         self.file_path.touch()
         logger.debug(
             f"Initialized {__class__.__name__} with index path {self.file_path}"

View File

@@ -23,10 +23,6 @@ if TYPE_CHECKING:
 from autogpt.logs import logger
 from autogpt.models.base_open_ai_plugin import BaseOpenAIPlugin
 
-DEFAULT_PLUGINS_CONFIG_FILE = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)), "..", "..", "plugins_config.yaml"
-)
-
 
 def inspect_zip_for_modules(zip_path: str, debug: bool = False) -> list[str]:
     """

View File

@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-import os
+from pathlib import Path
 from typing import Union
 
 import yaml
@@ -28,7 +28,7 @@ class PluginsConfig(BaseModel):
     @classmethod
     def load_config(
         cls,
-        plugins_config_file: str,
+        plugins_config_file: Path,
         plugins_denylist: list[str],
         plugins_allowlist: list[str],
     ) -> "PluginsConfig":
@@ -56,11 +56,11 @@ class PluginsConfig(BaseModel):
     @classmethod
     def deserialize_config_file(
         cls,
-        plugins_config_file: str,
+        plugins_config_file: Path,
         plugins_denylist: list[str],
         plugins_allowlist: list[str],
     ) -> dict[str, PluginConfig]:
-        if not os.path.exists(plugins_config_file):
+        if not plugins_config_file.is_file():
             logger.warn("plugins_config.yaml does not exist, creating base config.")
             cls.create_empty_plugins_config(
                 plugins_config_file,
@@ -87,7 +87,7 @@ class PluginsConfig(BaseModel):
     @staticmethod
     def create_empty_plugins_config(
-        plugins_config_file: str,
+        plugins_config_file: Path,
         plugins_denylist: list[str],
         plugins_allowlist: list[str],
     ):
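`deserialize_config_file` now checks `plugins_config_file.is_file()` rather than `os.path.exists(...)`; the two differ when something exists at the path but is not a regular file. A quick illustration:

```python
import os
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    p = Path(tmp) / "plugins_config.yaml"
    p.mkdir()  # something with the expected name exists, but it is a directory

    print(os.path.exists(p))  # True: the path exists
    print(p.is_file())        # False: it is not a regular file
```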

View File

@@ -55,7 +55,7 @@ def construct_main_ai_config(
     Returns:
         str: The prompt string
     """
-    ai_config = AIConfig.load(config.ai_settings_file)
+    ai_config = AIConfig.load(config.workdir / config.ai_settings_file)
 
     # Apply overrides
     if name:
@@ -99,7 +99,7 @@ Continue ({config.authorise_key}/{config.exit_key}): """,
     if any([not ai_config.ai_name, not ai_config.ai_role, not ai_config.ai_goals]):
         ai_config = prompt_user(config)
-        ai_config.save(config.ai_settings_file)
+        ai_config.save(config.workdir / config.ai_settings_file)
 
     if config.restrict_to_workspace:
         logger.typewriter_log(

View File

@@ -152,15 +152,13 @@ class Workspace:
         config.file_logger_path = str(file_logger_path)
 
     @staticmethod
-    def get_workspace_directory(
+    def set_workspace_directory(
         config: Config, workspace_directory: Optional[str | Path] = None
-    ):
+    ) -> None:
         if workspace_directory is None:
-            workspace_directory = Path(__file__).parent / "auto_gpt_workspace"
+            workspace_directory = config.workdir / "auto_gpt_workspace"
         elif type(workspace_directory) == str:
             workspace_directory = Path(workspace_directory)
         # TODO: pass in the ai_settings file and the env file and have them cloned into
         #   the workspace directory so we can bind them to the agent.
-        workspace_directory = Workspace.make_workspace(workspace_directory)
-        config.workspace_path = str(workspace_directory)
-        return workspace_directory
+        config.workspace_path = Workspace.make_workspace(workspace_directory)
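`set_workspace_directory` no longer returns the directory; it stores the `Path` from `make_workspace` on `config.workspace_path`, and the default location is now `config.workdir / "auto_gpt_workspace"` rather than a folder next to the module. A hypothetical helper showing just the default-resolution part in isolation:

```python
from __future__ import annotations

from pathlib import Path
from typing import Optional


def resolve_workspace_dir(
    workdir: Path, workspace_directory: Optional[str | Path] = None
) -> Path:
    # Hypothetical helper: anchor the default to an explicit workdir instead of
    # this module's own parent directory.
    if workspace_directory is None:
        workspace_directory = workdir / "auto_gpt_workspace"
    elif isinstance(workspace_directory, str):
        workspace_directory = Path(workspace_directory)
    return workspace_directory


print(resolve_workspace_dir(Path("/opt/Auto-GPT")))
print(resolve_workspace_dir(Path("/opt/Auto-GPT"), "custom_workspace"))
```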

View File

@@ -1,3 +1,5 @@
+from pathlib import Path
+
 from autogpt.agents import Agent
 from autogpt.config import AIConfig, Config, ConfigBuilder
 from autogpt.main import COMMAND_CATEGORIES, run_interaction_loop
@@ -6,6 +8,8 @@ from autogpt.models.command_registry import CommandRegistry
 from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT
 from autogpt.workspace import Workspace
 
+PROJECT_DIR = Path().resolve()
+
 
 def run_task(task) -> None:
     agent = bootstrap_agent(task)
@@ -13,15 +17,14 @@ def run_task(task) -> None:
 
 def bootstrap_agent(task):
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workdir=PROJECT_DIR)
     config.continuous_mode = False
     config.temperature = 0
     config.plain_output = True
     command_registry = get_command_registry(config)
     config.memory_backend = "no_memory"
-    workspace_directory = Workspace.get_workspace_directory(config)
-    workspace_directory_path = Workspace.make_workspace(workspace_directory)
-    Workspace.build_file_logger_path(config, workspace_directory_path)
+    Workspace.set_workspace_directory(config)
+    Workspace.build_file_logger_path(config, config.workspace_path)
     ai_config = AIConfig(
         ai_name="Auto-GPT",
         ai_role="a multi-purpose AI assistant.",
@@ -34,7 +37,7 @@ def bootstrap_agent(task):
         ai_config=ai_config,
         config=config,
         triggering_prompt=DEFAULT_TRIGGERING_PROMPT,
-        workspace_directory=str(workspace_directory_path),
+        workspace_directory=str(config.workspace_path),
     )
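`PROJECT_DIR = Path().resolve()` makes the benchmark's workdir the directory the process is launched from. A tiny check of what that expression evaluates to:

```python
from pathlib import Path

# Path() with no argument is ".", the current working directory;
# .resolve() makes it absolute, so the value follows wherever the
# process is launched from (normally the repository root here).
PROJECT_DIR = Path().resolve()
print(PROJECT_DIR)
print(Path.cwd())  # normally the same location
```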

View File

@@ -173,7 +173,7 @@ options:
 # python data_ingestion.py --dir DataFolder --init --overlap 100 --max_length 2000
 ```
 
-In the example above, the script initializes the memory, ingests all files within the `Auto-Gpt/autogpt/auto_gpt_workspace/DataFolder` directory into memory with an overlap between chunks of 100 and a maximum length of each chunk of 2000.
+In the example above, the script initializes the memory, ingests all files within the `Auto-Gpt/auto_gpt_workspace/DataFolder` directory into memory with an overlap between chunks of 100 and a maximum length of each chunk of 2000.
 
 Note that you can also use the `--file` argument to ingest a single file into memory and that data_ingestion.py will only ingest files within the `/auto_gpt_workspace` directory.

View File

@@ -51,7 +51,7 @@ Get your OpenAI API key from: [https://platform.openai.com/account/api-keys](htt
       - .env
     profiles: ["exclude-from-up"]
     volumes:
-      - ./auto_gpt_workspace:/app/autogpt/auto_gpt_workspace
+      - ./auto_gpt_workspace:/app/auto_gpt_workspace
       - ./data:/app/data
       ## allow auto-gpt to write logs to disk
       - ./logs:/app/logs

View File

@@ -48,7 +48,7 @@ def temp_plugins_config_file():
 def config(
     temp_plugins_config_file: str, mocker: MockerFixture, workspace: Workspace
 ) -> Config:
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workspace.root.parent)
     if not os.environ.get("OPENAI_API_KEY"):
         os.environ["OPENAI_API_KEY"] = "sk-dummy"

View File

@@ -161,7 +161,7 @@ azure_model_map:
     os.environ["USE_AZURE"] = "True"
     os.environ["AZURE_CONFIG_FILE"] = str(config_file)
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workspace.root.parent)
 
     assert config.openai_api_type == "azure"
     assert config.openai_api_base == "https://dummy.openai.azure.com"

View File

@@ -71,7 +71,7 @@ def test_create_base_config(config: Config):
     os.remove(config.plugins_config_file)
 
     plugins_config = PluginsConfig.load_config(
-        plugins_config_file=config.plugins_config_file,
+        plugins_config_file=config.workdir / config.plugins_config_file,
         plugins_denylist=config.plugins_denylist,
         plugins_allowlist=config.plugins_allowlist,
     )
@@ -107,7 +107,7 @@ def test_load_config(config: Config):
     # Load the config from disk
     plugins_config = PluginsConfig.load_config(
-        plugins_config_file=config.plugins_config_file,
+        plugins_config_file=config.workdir / config.plugins_config_file,
         plugins_denylist=config.plugins_denylist,
         plugins_allowlist=config.plugins_allowlist,
     )
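The tests now join the bare `plugins_config_file` name onto `config.workdir`, matching the convention introduced in `config.py`. A hypothetical pytest-style check of that join, using the built-in `tmp_path` fixture as a stand-in workdir:

```python
from pathlib import Path


def test_bare_filename_joined_to_workdir(tmp_path: Path) -> None:
    # Hypothetical check: bare config file names resolve under the workdir.
    plugins_config_file = "plugins_config.yaml"
    full_path = tmp_path / plugins_config_file

    full_path.write_text("{}\n")
    assert full_path.is_file()
    assert full_path.parent == tmp_path
```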