From 34352afd53da965449e84be379bcc3a6ff2ce63d Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Sun, 8 Oct 2023 01:04:53 -0700 Subject: [PATCH] Create AgentFileManager * Remove references to (broken) vector memory * Move workspace setup to `WorkspaceMixin.attach_fs` hook * Move directives into `BaseAgentSettings` --- .../autogpt/agbenchmark_config/benchmarks.py | 10 ++-- autogpts/autogpt/autogpt/agents/agent.py | 20 +++---- autogpts/autogpt/autogpt/agents/base.py | 57 +++++++++++++------ .../autogpt/agents/features/workspace.py | 38 +++++++++---- .../agents/prompt_strategies/one_shot.py | 5 +- .../agents/utils/agent_file_manager.py | 37 ++++++++++++ autogpts/autogpt/autogpt/app/main.py | 38 ++++--------- .../autogpt/autogpt/commands/execute_code.py | 2 +- .../autogpt/commands/file_operations.py | 14 ++--- .../autogpt/autogpt/commands/image_gen.py | 2 +- .../autogpt/autogpt/config/ai_directives.py | 12 ++-- autogpts/autogpt/autogpt/config/config.py | 2 - .../core/runner/cli_web_app/server/api.py | 5 -- .../autogpt/autogpt/workspace/workspace.py | 46 +-------------- .../data/agents/AutoGPT/workspace/main.py | 0 autogpts/autogpt/tests/conftest.py | 55 +++++++++++------- .../tests/integration/agent_factory.py | 1 - .../tests/integration/test_execute_code.py | 9 ++- .../tests/unit/test_file_operations.py | 8 +-- 19 files changed, 192 insertions(+), 169 deletions(-) create mode 100644 autogpts/autogpt/autogpt/agents/utils/agent_file_manager.py create mode 100644 autogpts/autogpt/data/agents/AutoGPT/workspace/main.py diff --git a/autogpts/autogpt/agbenchmark_config/benchmarks.py b/autogpts/autogpt/agbenchmark_config/benchmarks.py index 63a82445..46748658 100644 --- a/autogpts/autogpt/agbenchmark_config/benchmarks.py +++ b/autogpts/autogpt/agbenchmark_config/benchmarks.py @@ -7,9 +7,7 @@ from autogpt.app.main import _configure_openai_provider, run_interaction_loop from autogpt.commands import COMMAND_CATEGORIES from autogpt.config import AIProfile, ConfigBuilder from autogpt.logs.config import configure_logging -from autogpt.memory.vector import get_memory from autogpt.models.command_registry import CommandRegistry -from autogpt.workspace import Workspace LOG_DIR = Path(__file__).parent / "logs" @@ -28,8 +26,6 @@ def bootstrap_agent(task: str, continuous_mode: bool) -> Agent: config.noninteractive_mode = True config.plain_output = True config.memory_backend = "no_memory" - config.workspace_path = Workspace.init_workspace_directory(config) - config.file_logger_path = Workspace.build_file_logger_path(config.workspace_path) configure_logging( debug_mode=config.debug_mode, @@ -54,6 +50,7 @@ def bootstrap_agent(task: str, continuous_mode: bool) -> Agent: config=AgentConfiguration( fast_llm=config.fast_llm, smart_llm=config.smart_llm, + allow_fs_access=not config.restrict_to_workspace, use_functions_api=config.openai_functions, plugins=config.plugins, ), @@ -61,13 +58,14 @@ def bootstrap_agent(task: str, continuous_mode: bool) -> Agent: history=Agent.default_settings.history.copy(deep=True), ) - return Agent( + agent = Agent( settings=agent_settings, llm_provider=_configure_openai_provider(config), command_registry=command_registry, - memory=get_memory(config), legacy_config=config, ) + agent.attach_fs(config.app_data_dir / "agents" / "AutoGPT-benchmark") # HACK + return agent if __name__ == "__main__": diff --git a/autogpts/autogpt/autogpt/agents/agent.py b/autogpts/autogpt/autogpt/agents/agent.py index 18697ef4..94e15eb6 100644 --- a/autogpts/autogpt/autogpt/agents/agent.py +++ 
b/autogpts/autogpt/autogpt/agents/agent.py @@ -8,10 +8,10 @@ from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: from autogpt.config import Config - from autogpt.memory.vector import VectorMemory from autogpt.models.command_registry import CommandRegistry -from autogpt.config import AIProfile +from pydantic import Field + from autogpt.core.configuration import Configurable from autogpt.core.prompting import ChatPrompt from autogpt.core.resource.model_providers import ( @@ -54,8 +54,12 @@ class AgentConfiguration(BaseAgentConfiguration): class AgentSettings(BaseAgentSettings): - config: AgentConfiguration - prompt_config: OneShotAgentPromptConfiguration + config: AgentConfiguration = Field(default_factory=AgentConfiguration) + prompt_config: OneShotAgentPromptConfiguration = Field( + default_factory=( + lambda: OneShotAgentPromptStrategy.default_configuration.copy(deep=True) + ) + ) class Agent( @@ -70,10 +74,6 @@ class Agent( default_settings: AgentSettings = AgentSettings( name="Agent", description=__doc__, - ai_profile=AIProfile(ai_name="AutoGPT"), - config=AgentConfiguration(), - prompt_config=OneShotAgentPromptStrategy.default_configuration, - history=BaseAgent.default_settings.history, ) def __init__( @@ -81,7 +81,6 @@ class Agent( settings: AgentSettings, llm_provider: ChatModelProvider, command_registry: CommandRegistry, - memory: VectorMemory, legacy_config: Config, ): prompt_strategy = OneShotAgentPromptStrategy( @@ -96,9 +95,6 @@ class Agent( legacy_config=legacy_config, ) - self.memory = memory - """VectorMemoryProvider used to manage the agent's context (TODO)""" - self.created_at = datetime.now().strftime("%Y%m%d_%H%M%S") """Timestamp the agent was created; only used for structured debug logging.""" diff --git a/autogpts/autogpt/autogpt/agents/base.py b/autogpts/autogpt/autogpt/agents/base.py index 100d1ec3..652de663 100644 --- a/autogpts/autogpt/autogpt/agents/base.py +++ b/autogpts/autogpt/autogpt/agents/base.py @@ -2,12 +2,14 @@ from __future__ import annotations import logging from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Literal, Optional +from typing import TYPE_CHECKING, Any, Optional from auto_gpt_plugin_template import AutoGPTPluginTemplate from pydantic import Field, validator if TYPE_CHECKING: + from pathlib import Path + from autogpt.config import Config from autogpt.core.prompting.base import PromptStrategy from autogpt.core.resource.model_providers.schema import ( @@ -18,6 +20,7 @@ if TYPE_CHECKING: from autogpt.models.command_registry import CommandRegistry from autogpt.agents.utils.prompt_scratchpad import PromptScratchpad +from autogpt.config import ConfigBuilder from autogpt.config.ai_profile import AIProfile from autogpt.config.ai_directives import AIDirectives from autogpt.core.configuration import ( @@ -40,6 +43,8 @@ from autogpt.llm.providers.openai import get_openai_command_specs from autogpt.models.action_history import ActionResult, EpisodicActionHistory from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT +from .utils.agent_file_manager import AgentFileManager + logger = logging.getLogger(__name__) CommandName = str @@ -48,6 +53,8 @@ AgentThoughts = dict[str, Any] class BaseAgentConfiguration(SystemConfiguration): + allow_fs_access: bool = UserConfigurable(default=False) + fast_llm: OpenAIModelName = UserConfigurable(default=OpenAIModelName.GPT3_16k) smart_llm: OpenAIModelName = UserConfigurable(default=OpenAIModelName.GPT4) use_functions_api: bool = UserConfigurable(default=False) @@ -118,27 +125,33 @@ 
class BaseAgentConfiguration(SystemConfiguration): class BaseAgentSettings(SystemSettings): - ai_profile: AIProfile = Field(default_factory=lambda: AIProfile(ai_name="AutoGPT")) - """The AIProfile or "personality" of this agent.""" + agent_data_dir: Optional[Path] = None - config: BaseAgentConfiguration + ai_profile: AIProfile = Field(default_factory=lambda: AIProfile(ai_name="AutoGPT")) + """The AI profile or "personality" of the agent.""" + + directives: AIDirectives = Field( + default_factory=lambda: AIDirectives.from_file( + ConfigBuilder.default_settings.prompt_settings_file + ) + ) + """Directives (general instructional guidelines) for the agent.""" + + config: BaseAgentConfiguration = Field(default_factory=BaseAgentConfiguration) """The configuration for this BaseAgent subsystem instance.""" - history: EpisodicActionHistory + history: EpisodicActionHistory = Field(default_factory=EpisodicActionHistory) """(STATE) The action history of the agent.""" class BaseAgent(Configurable[BaseAgentSettings], ABC): """Base class for all AutoGPT agent classes.""" - ThoughtProcessID = Literal["one-shot"] ThoughtProcessOutput = tuple[CommandName, CommandArgs, AgentThoughts] default_settings = BaseAgentSettings( name="BaseAgent", description=__doc__, - config=BaseAgentConfiguration(), - history=EpisodicActionHistory(), ) def __init__( @@ -149,8 +162,20 @@ class BaseAgent(Configurable[BaseAgentSettings], ABC): command_registry: CommandRegistry, legacy_config: Config, ): + self.state = settings + self.config = settings.config self.ai_profile = settings.ai_profile - self.ai_directives = AIDirectives.from_file(legacy_config.prompt_settings_file) + self.ai_directives = settings.directives + self.event_history = settings.history + + self.legacy_config = legacy_config + """LEGACY: Monolithic application configuration.""" + + self.file_manager = ( + AgentFileManager(settings.agent_data_dir) + if settings.agent_data_dir + else None + ) self.llm_provider = llm_provider @@ -159,14 +184,6 @@ class BaseAgent(Configurable[BaseAgentSettings], ABC): self.command_registry = command_registry """The registry containing all commands available to the agent.""" - self.llm_provider = llm_provider - - self.legacy_config = legacy_config - self.config = settings.config - """The applicable application configuration.""" - - self.event_history = settings.history - self._prompt_scratchpad: PromptScratchpad | None = None # Support multi-inheritance and mixins for subclasses @@ -174,6 +191,12 @@ class BaseAgent(Configurable[BaseAgentSettings], ABC): logger.debug(f"Created {__class__} '{self.ai_profile.ai_name}'") + def attach_fs(self, agent_dir: Path) -> AgentFileManager: + self.file_manager = AgentFileManager(agent_dir) + self.file_manager.initialize() + self.state.agent_data_dir = agent_dir + return self.file_manager + @property def llm(self) -> ChatModelInfo: """The LLM that the agent uses to think.""" diff --git a/autogpts/autogpt/autogpt/agents/features/workspace.py b/autogpts/autogpt/autogpt/agents/features/workspace.py index 34eceb27..0e133aa0 100644 --- a/autogpts/autogpt/autogpt/agents/features/workspace.py +++ b/autogpts/autogpt/autogpt/agents/features/workspace.py @@ -3,33 +3,51 @@ from __future__ import annotations from typing import TYPE_CHECKING if TYPE_CHECKING: + from pathlib import Path + from ..base import BaseAgent -from autogpt.config import Config from autogpt.workspace import Workspace +from ..base import AgentFileManager, BaseAgentConfiguration + class WorkspaceMixin: """Mixin that adds workspace support to 
a class""" - workspace: Workspace + workspace: Workspace | None """Workspace that the agent has access to, e.g. for reading/writing files.""" def __init__(self, **kwargs): # Initialize other bases first, because we need the config from BaseAgent super(WorkspaceMixin, self).__init__(**kwargs) - legacy_config: Config = getattr(self, "legacy_config") - if not isinstance(legacy_config, Config): - raise ValueError(f"Cannot initialize Workspace for Agent without Config") - if not legacy_config.workspace_path: + config: BaseAgentConfiguration = getattr(self, "config") + if not isinstance(config, BaseAgentConfiguration): raise ValueError( - f"Cannot set up Workspace: no WORKSPACE_PATH in legacy_config" + "Cannot initialize Workspace for Agent without compatible .config" ) + file_manager: AgentFileManager = getattr(self, "file_manager") + if not file_manager: + return - self.workspace = Workspace( - legacy_config.workspace_path, legacy_config.restrict_to_workspace - ) + self.workspace = _setup_workspace(file_manager, config) + + def attach_fs(self, agent_dir: Path): + res = super(WorkspaceMixin, self).attach_fs(agent_dir) + + self.workspace = _setup_workspace(self.file_manager, self.config) + + return res + + +def _setup_workspace(file_manager: AgentFileManager, config: BaseAgentConfiguration): + workspace = Workspace( + file_manager.root / "workspace", + restrict_to_workspace=not config.allow_fs_access, + ) + workspace.initialize() + return workspace def get_agent_workspace(agent: BaseAgent) -> Workspace | None: diff --git a/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py b/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py index 38da3745..8422768d 100644 --- a/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py +++ b/autogpts/autogpt/autogpt/agents/prompt_strategies/one_shot.py @@ -7,6 +7,7 @@ from logging import Logger from typing import TYPE_CHECKING, Callable, Optional import distro +from pydantic import Field if TYPE_CHECKING: from autogpt.agents.agent import Agent @@ -166,7 +167,9 @@ class OneShotAgentPromptConfiguration(SystemConfiguration): ######### # State # ######### - progress_summaries: dict[tuple[int, int], str] = {(0, 0): ""} + progress_summaries: dict[tuple[int, int], str] = Field( + default_factory=lambda: {(0, 0): ""} + ) class OneShotAgentPromptStrategy(PromptStrategy): diff --git a/autogpts/autogpt/autogpt/agents/utils/agent_file_manager.py b/autogpts/autogpt/autogpt/agents/utils/agent_file_manager.py new file mode 100644 index 00000000..b8e0f055 --- /dev/null +++ b/autogpts/autogpt/autogpt/agents/utils/agent_file_manager.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +import logging +from pathlib import Path + +logger = logging.getLogger(__name__) + + +class AgentFileManager: + """A class that represents a workspace for an AutoGPT agent.""" + + def __init__(self, agent_data_dir: Path): + self._root = agent_data_dir.resolve() + + @property + def root(self) -> Path: + """The root directory of the workspace.""" + return self._root + + def initialize(self) -> None: + self.root.mkdir(exist_ok=True, parents=True) + self.init_file_ops_log(self.file_ops_log_path) + + @property + def state_file_path(self) -> Path: + return self.root / "state.json" + + @property + def file_ops_log_path(self) -> Path: + return self.root / "file_logger.log" + + @staticmethod + def init_file_ops_log(file_logger_path: Path) -> Path: + if not file_logger_path.exists(): + with file_logger_path.open(mode="w", encoding="utf-8") as f: + f.write("File Operation Logger 
") + return file_logger_path diff --git a/autogpts/autogpt/autogpt/app/main.py b/autogpts/autogpt/autogpt/app/main.py index af1f52b1..70c704ea 100644 --- a/autogpts/autogpt/autogpt/app/main.py +++ b/autogpts/autogpt/autogpt/app/main.py @@ -26,6 +26,7 @@ from autogpt.app.utils import ( ) from autogpt.commands import COMMAND_CATEGORIES from autogpt.config import ( + AIDirectives, AIProfile, Config, ConfigBuilder, @@ -40,10 +41,8 @@ from autogpt.core.runner.client_lib.utils import coroutine from autogpt.llm.api_manager import ApiManager from autogpt.logs.config import configure_chat_plugins, configure_logging from autogpt.logs.helpers import print_attribute, speak -from autogpt.memory.vector import get_memory from autogpt.models.command_registry import CommandRegistry from autogpt.plugins import scan_plugins -from autogpt.workspace import Workspace from scripts.install_plugin_deps import install_plugin_dependencies @@ -62,7 +61,6 @@ async def run_auto_gpt( browser_name: str, allow_downloads: bool, skip_news: bool, - working_directory: Path, workspace_directory: str | Path, install_plugin_deps: bool, ai_name: Optional[str] = None, @@ -121,16 +119,6 @@ async def run_auto_gpt( if install_plugin_deps: install_plugin_dependencies() - # TODO: have this directory live outside the repository (e.g. in a user's - # home directory) and have it come in as a command line argument or part of - # the env file. - config.workspace_path = Workspace.init_workspace_directory( - config, workspace_directory - ) - - # HACK: doing this here to collect some globals that depend on the workspace. - config.file_logger_path = Workspace.build_file_logger_path(config.workspace_path) - config.plugins = scan_plugins(config, config.debug_mode) configure_chat_plugins(config) @@ -144,6 +132,7 @@ async def run_auto_gpt( role=ai_role, goals=ai_goals, ) + ai_directives = AIDirectives.from_file(config.prompt_settings_file) agent_prompt_config = Agent.default_settings.prompt_config.copy(deep=True) agent_prompt_config.use_functions_api = config.openai_functions @@ -152,9 +141,11 @@ async def run_auto_gpt( name=Agent.default_settings.name, description=Agent.default_settings.description, ai_profile=ai_profile, + directives=ai_directives, config=AgentConfiguration( fast_llm=config.fast_llm, smart_llm=config.smart_llm, + allow_fs_access=not config.restrict_to_workspace, use_functions_api=config.openai_functions, plugins=config.plugins, ), @@ -162,21 +153,22 @@ async def run_auto_gpt( history=Agent.default_settings.history.copy(deep=True), ) - # Initialize memory and make sure it is empty. 
- # this is particularly important for indexing and referencing pinecone memory - memory = get_memory(config) - memory.clear() - print_attribute("Configured Memory", memory.__class__.__name__) - print_attribute("Configured Browser", config.selenium_web_browser) agent = Agent( settings=agent_settings, llm_provider=llm_provider, command_registry=command_registry, - memory=memory, legacy_config=config, ) + agent.attach_fs(config.app_data_dir / "agents" / "AutoGPT") # HACK + + if not agent.config.allow_fs_access: + logger.info( + f"{Fore.YELLOW}NOTE: All files/directories created by this agent" + f" can be found inside its workspace at:{Fore.RESET} {agent.workspace.root}", + extra={"preserve_color": True}, + ) await run_interaction_loop(agent) @@ -587,12 +579,6 @@ Continue ({config.authorise_key}/{config.exit_key}): """, ai_profile = await interactive_ai_profile_setup(config, llm_provider) ai_profile.save(config.ai_settings_file) - if config.restrict_to_workspace: - logger.info( - f"{Fore.YELLOW}NOTE: All files/directories created by this agent" - f" can be found inside its workspace at:{Fore.RESET} {config.workspace_path}", - extra={"preserve_color": True}, - ) # set the total api budget api_manager = ApiManager() api_manager.set_total_budget(ai_profile.api_budget) diff --git a/autogpts/autogpt/autogpt/commands/execute_code.py b/autogpts/autogpt/autogpt/commands/execute_code.py index 5e05efe0..f9d55885 100644 --- a/autogpts/autogpt/autogpt/commands/execute_code.py +++ b/autogpts/autogpt/autogpt/commands/execute_code.py @@ -102,7 +102,7 @@ def execute_python_file( str: The output of the file """ logger.info( - f"Executing python file '{filename}' in working directory '{agent.legacy_config.workspace_path}'" + f"Executing python file '{filename}' in working directory '{agent.workspace.root}'" ) if isinstance(args, str): diff --git a/autogpts/autogpt/autogpt/commands/file_operations.py b/autogpts/autogpt/autogpt/commands/file_operations.py index 2a4bb784..4238ff2f 100644 --- a/autogpts/autogpt/autogpt/commands/file_operations.py +++ b/autogpts/autogpt/autogpt/commands/file_operations.py @@ -62,15 +62,15 @@ def operations_from_log( def file_operations_state(log_path: str | Path) -> dict[str, str]: """Iterates over the operations log and returns the expected state. - Parses a log file at config.file_logger_path to construct a dictionary that maps - each file path written or appended to its checksum. Deleted files are removed - from the dictionary. + Parses a log file at file_manager.file_ops_log_path to construct a dictionary + that maps each file path written or appended to its checksum. Deleted files are + removed from the dictionary. Returns: A dictionary mapping file paths to their checksums. Raises: - FileNotFoundError: If config.file_logger_path is not found. + FileNotFoundError: If file_manager.file_ops_log_path is not found. ValueError: If the log file content is not in the expected format. 
""" state = {} @@ -101,7 +101,7 @@ def is_duplicate_operation( with contextlib.suppress(ValueError): file_path = file_path.relative_to(agent.workspace.root) - state = file_operations_state(agent.legacy_config.file_logger_path) + state = file_operations_state(agent.file_manager.file_ops_log_path) if operation == "delete" and str(file_path) not in state: return True if operation == "write" and state.get(str(file_path)) == checksum: @@ -129,7 +129,7 @@ def log_operation( log_entry += f" #{checksum}" logger.debug(f"Logging file operation: {log_entry}") append_to_file( - agent.legacy_config.file_logger_path, f"{log_entry}\n", agent, should_log=False + agent.file_manager.file_ops_log_path, f"{log_entry}\n", agent, should_log=False ) @@ -280,7 +280,7 @@ def list_folder(folder: Path, agent: Agent) -> list[str]: if file.startswith("."): continue relative_path = os.path.relpath( - os.path.join(root, file), agent.legacy_config.workspace_path + os.path.join(root, file), agent.workspace.root ) found_files.append(relative_path) diff --git a/autogpts/autogpt/autogpt/commands/image_gen.py b/autogpts/autogpt/autogpt/commands/image_gen.py index 33d18956..ba771635 100644 --- a/autogpts/autogpt/autogpt/commands/image_gen.py +++ b/autogpts/autogpt/autogpt/commands/image_gen.py @@ -44,7 +44,7 @@ def generate_image(prompt: str, agent: Agent, size: int = 256) -> str: Returns: str: The filename of the image """ - filename = agent.legacy_config.workspace_path / f"{str(uuid.uuid4())}.jpg" + filename = agent.workspace.root / f"{str(uuid.uuid4())}.jpg" # DALL-E if agent.legacy_config.image_provider == "dalle": diff --git a/autogpts/autogpt/autogpt/config/ai_directives.py b/autogpts/autogpt/autogpt/config/ai_directives.py index 09002d51..7bb38817 100644 --- a/autogpts/autogpt/autogpt/config/ai_directives.py +++ b/autogpts/autogpt/autogpt/config/ai_directives.py @@ -1,10 +1,8 @@ -from __future__ import annotations - import logging from pathlib import Path import yaml -from pydantic import BaseModel +from pydantic import BaseModel, Field from autogpt.logs.helpers import request_user_double_check from autogpt.utils import validate_yaml_file @@ -21,12 +19,12 @@ class AIDirectives(BaseModel): best_practices (list): A list of best practices that the AI should follow. 
""" - constraints: list[str] - resources: list[str] - best_practices: list[str] + resources: list[str] = Field(default_factory=list) + constraints: list[str] = Field(default_factory=list) + best_practices: list[str] = Field(default_factory=list) @staticmethod - def from_file(prompt_settings_file: Path) -> AIDirectives: + def from_file(prompt_settings_file: Path) -> "AIDirectives": (validated, message) = validate_yaml_file(prompt_settings_file) if not validated: logger.error(message, extra={"title": "FAILED FILE VALIDATION"}) diff --git a/autogpts/autogpt/autogpt/config/config.py b/autogpts/autogpt/autogpt/config/config.py index 899f82b7..5436a670 100644 --- a/autogpts/autogpt/autogpt/config/config.py +++ b/autogpts/autogpt/autogpt/config/config.py @@ -53,8 +53,6 @@ class Config(SystemSettings, arbitrary_types_allowed=True): # Paths ai_settings_file: Path = project_root / AI_SETTINGS_FILE prompt_settings_file: Path = project_root / PROMPT_SETTINGS_FILE - workspace_path: Optional[Path] = None - file_logger_path: Optional[Path] = None # Model configuration fast_llm: str = "gpt-3.5-turbo-16k" smart_llm: str = "gpt-4-0314" diff --git a/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py b/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py index fc87a863..07ccc3fc 100644 --- a/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py +++ b/autogpts/autogpt/autogpt/core/runner/cli_web_app/server/api.py @@ -8,10 +8,8 @@ from autogpt.app.main import UserFeedback from autogpt.commands import COMMAND_CATEGORIES from autogpt.config import AIProfile, ConfigBuilder from autogpt.logs.helpers import user_friendly_output -from autogpt.memory.vector import get_memory from autogpt.models.command_registry import CommandRegistry from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT -from autogpt.workspace import Workspace async def task_handler(task_input) -> StepHandler: @@ -87,15 +85,12 @@ def bootstrap_agent(task, continuous_mode) -> Agent: config.plain_output = True command_registry = CommandRegistry.with_command_modules(COMMAND_CATEGORIES, config) config.memory_backend = "no_memory" - config.workspace_path = Workspace.init_workspace_directory(config) - config.file_logger_path = Workspace.build_file_logger_path(config.workspace_path) ai_profile = AIProfile( ai_name="AutoGPT", ai_role="a multi-purpose AI assistant.", ai_goals=[task], ) return Agent( - memory=get_memory(config), command_registry=command_registry, ai_profile=ai_profile, config=config, diff --git a/autogpts/autogpt/autogpt/workspace/workspace.py b/autogpts/autogpt/autogpt/workspace/workspace.py index 125e6740..b2a81e74 100644 --- a/autogpts/autogpt/autogpt/workspace/workspace.py +++ b/autogpts/autogpt/autogpt/workspace/workspace.py @@ -13,8 +13,6 @@ import logging from pathlib import Path from typing import Optional -from autogpt.config import Config - logger = logging.getLogger(__name__) @@ -37,25 +35,8 @@ class Workspace: """Whether to restrict generated paths to the workspace.""" return self._restrict_to_workspace - @classmethod - def make_workspace(cls, workspace_directory: str | Path, *args, **kwargs) -> Path: - """Create a workspace directory and return the path to it. - - Parameters - ---------- - workspace_directory - The path to the workspace directory. - - Returns - ------- - Path - The path to the workspace directory. - - """ - # TODO: have this make the env file and ai settings file in the directory. 
- workspace_directory = cls._sanitize_path(workspace_directory) - workspace_directory.mkdir(exist_ok=True, parents=True) - return workspace_directory + def initialize(self) -> None: + self.root.mkdir(exist_ok=True, parents=True) def get_path(self, relative_path: str | Path) -> Path: """Get the full path for an item in the workspace. @@ -144,26 +125,3 @@ class Workspace: ) return full_path - - @staticmethod - def build_file_logger_path(workspace_directory: Path) -> Path: - file_logger_path = workspace_directory / "file_logger.log" - if not file_logger_path.exists(): - with file_logger_path.open(mode="w", encoding="utf-8") as f: - f.write("File Operation Logger ") - return file_logger_path - - @staticmethod - def init_workspace_directory( - config: Config, override_workspace_path: Optional[str | Path] = None - ) -> Path: - if override_workspace_path is None: - workspace_path = config.workdir / "auto_gpt_workspace" - elif type(override_workspace_path) == str: - workspace_path = Path(override_workspace_path) - else: - workspace_path = override_workspace_path - - # TODO: pass in the ai_settings file and the env file and have them cloned into - # the workspace directory so we can bind them to the agent. - return Workspace.make_workspace(workspace_path) diff --git a/autogpts/autogpt/data/agents/AutoGPT/workspace/main.py b/autogpts/autogpt/data/agents/AutoGPT/workspace/main.py new file mode 100644 index 00000000..e69de29b diff --git a/autogpts/autogpt/tests/conftest.py b/autogpts/autogpt/tests/conftest.py index 6faf4975..78c7e61d 100644 --- a/autogpts/autogpt/tests/conftest.py +++ b/autogpts/autogpt/tests/conftest.py @@ -24,14 +24,30 @@ pytest_plugins = [ @pytest.fixture() -def workspace_root(tmp_path: Path) -> Path: - return tmp_path / "home/users/monty/auto_gpt_workspace" +def tmp_project_root(tmp_path: Path) -> Path: + return tmp_path + + +@pytest.fixture() +def app_data_dir(tmp_project_root: Path) -> Path: + return tmp_project_root / "data" + + +@pytest.fixture() +def agent_data_dir(app_data_dir: Path) -> Path: + return app_data_dir / "agents/AutoGPT" + + +@pytest.fixture() +def workspace_root(agent_data_dir: Path) -> Path: + return agent_data_dir / "workspace" @pytest.fixture() def workspace(workspace_root: Path) -> Workspace: - workspace_root = Workspace.make_workspace(workspace_root) - return Workspace(workspace_root, restrict_to_workspace=True) + workspace = Workspace(workspace_root, restrict_to_workspace=True) + workspace.initialize() + return workspace @pytest.fixture @@ -46,12 +62,17 @@ def temp_plugins_config_file(): @pytest.fixture() -def config(temp_plugins_config_file: Path, mocker: MockerFixture, workspace: Workspace): - config = ConfigBuilder.build_config_from_env(project_root=workspace.root.parent) +def config( + temp_plugins_config_file: Path, + tmp_project_root: Path, + app_data_dir: Path, + mocker: MockerFixture, +): + config = ConfigBuilder.build_config_from_env(project_root=tmp_project_root) if not os.environ.get("OPENAI_API_KEY"): os.environ["OPENAI_API_KEY"] = "sk-dummy" - config.workspace_path = workspace.root + config.app_data_dir = app_data_dir config.plugins_dir = "tests/unit/data/test_plugins" config.plugins_config_file = temp_plugins_config_file @@ -67,13 +88,6 @@ def config(temp_plugins_config_file: Path, mocker: MockerFixture, workspace: Wor plugins_denylist=config.plugins_denylist, plugins_allowlist=config.plugins_allowlist, ) - - # Do a little setup and teardown since the config object is a singleton - mocker.patch.multiple( - config, - 
workspace_path=workspace.root, - file_logger_path=workspace.get_path("file_logger.log"), - ) yield config @@ -99,7 +113,9 @@ def llm_provider(config: Config) -> OpenAIProvider: @pytest.fixture -def agent(config: Config, llm_provider: ChatModelProvider) -> Agent: +def agent( + agent_data_dir: Path, config: Config, llm_provider: ChatModelProvider +) -> Agent: ai_profile = AIProfile( ai_name="Base", ai_role="A base AI", @@ -107,9 +123,6 @@ def agent(config: Config, llm_provider: ChatModelProvider) -> Agent: ) command_registry = CommandRegistry() - config.memory_backend = "json_file" - memory_json_file = get_memory(config) - memory_json_file.clear() agent_prompt_config = Agent.default_settings.prompt_config.copy(deep=True) agent_prompt_config.use_functions_api = config.openai_functions @@ -121,6 +134,7 @@ def agent(config: Config, llm_provider: ChatModelProvider) -> Agent: config=AgentConfiguration( fast_llm=config.fast_llm, smart_llm=config.smart_llm, + allow_fs_access=not config.restrict_to_workspace, use_functions_api=config.openai_functions, plugins=config.plugins, ), @@ -128,10 +142,11 @@ def agent(config: Config, llm_provider: ChatModelProvider) -> Agent: history=Agent.default_settings.history.copy(deep=True), ) - return Agent( + agent = Agent( settings=agent_settings, llm_provider=llm_provider, command_registry=command_registry, - memory=memory_json_file, legacy_config=config, ) + agent.attach_fs(agent_data_dir) + return agent diff --git a/autogpts/autogpt/tests/integration/agent_factory.py b/autogpts/autogpt/tests/integration/agent_factory.py index be97b38a..dfff73b9 100644 --- a/autogpts/autogpt/tests/integration/agent_factory.py +++ b/autogpts/autogpt/tests/integration/agent_factory.py @@ -50,7 +50,6 @@ def dummy_agent(config: Config, llm_provider, memory_json_file): settings=agent_settings, llm_provider=llm_provider, command_registry=command_registry, - memory=memory_json_file, legacy_config=config, ) diff --git a/autogpts/autogpt/tests/integration/test_execute_code.py b/autogpts/autogpt/tests/integration/test_execute_code.py index 3049d01e..cc281a0d 100644 --- a/autogpts/autogpt/tests/integration/test_execute_code.py +++ b/autogpts/autogpt/tests/integration/test_execute_code.py @@ -11,7 +11,6 @@ from autogpt.agents.utils.exceptions import ( InvalidArgumentError, OperationNotAllowedError, ) -from autogpt.config import Config @pytest.fixture @@ -20,8 +19,8 @@ def random_code(random_string) -> str: @pytest.fixture -def python_test_file(config: Config, random_code: str): - temp_file = tempfile.NamedTemporaryFile(dir=config.workspace_path, suffix=".py") +def python_test_file(agent: Agent, random_code: str): + temp_file = tempfile.NamedTemporaryFile(dir=agent.workspace.root, suffix=".py") temp_file.write(str.encode(random_code)) temp_file.flush() @@ -30,8 +29,8 @@ def python_test_file(config: Config, random_code: str): @pytest.fixture -def python_test_args_file(config: Config): - temp_file = tempfile.NamedTemporaryFile(dir=config.workspace_path, suffix=".py") +def python_test_args_file(agent: Agent): + temp_file = tempfile.NamedTemporaryFile(dir=agent.workspace.root, suffix=".py") temp_file.write(str.encode("import sys\nprint(sys.argv[1], sys.argv[2])")) temp_file.flush() diff --git a/autogpts/autogpt/tests/unit/test_file_operations.py b/autogpts/autogpt/tests/unit/test_file_operations.py index 75b5c588..0ea9ed73 100644 --- a/autogpts/autogpt/tests/unit/test_file_operations.py +++ b/autogpts/autogpt/tests/unit/test_file_operations.py @@ -169,7 +169,7 @@ def 
test_is_duplicate_operation(agent: Agent, mocker: MockerFixture): # Test logging a file operation def test_log_operation(agent: Agent): file_ops.log_operation("log_test", "path/to/test", agent=agent) - with open(agent.legacy_config.file_logger_path, "r", encoding="utf-8") as f: + with open(agent.file_manager.file_ops_log_path, "r", encoding="utf-8") as f: content = f.read() assert f"log_test: path/to/test\n" in content @@ -183,7 +183,7 @@ def test_text_checksum(file_content: str): def test_log_operation_with_checksum(agent: Agent): file_ops.log_operation("log_test", "path/to/test", agent=agent, checksum="ABCDEF") - with open(agent.legacy_config.file_logger_path, "r", encoding="utf-8") as f: + with open(agent.file_manager.file_ops_log_path, "r", encoding="utf-8") as f: content = f.read() assert f"log_test: path/to/test #ABCDEF\n" in content @@ -224,7 +224,7 @@ def test_write_file_logs_checksum(test_file_name: Path, agent: Agent): new_content = "This is new content.\n" new_checksum = file_ops.text_checksum(new_content) file_ops.write_to_file(str(test_file_name), new_content, agent=agent) - with open(agent.legacy_config.file_logger_path, "r", encoding="utf-8") as f: + with open(agent.file_manager.file_ops_log_path, "r", encoding="utf-8") as f: log_entry = f.read() assert log_entry == f"write: {test_file_name} #{new_checksum}\n" @@ -266,7 +266,7 @@ def test_append_to_file_uses_checksum_from_appended_file( append_text = "This is appended text.\n" file_ops.append_to_file(test_file_name, append_text, agent=agent) file_ops.append_to_file(test_file_name, append_text, agent=agent) - with open(agent.legacy_config.file_logger_path, "r", encoding="utf-8") as f: + with open(agent.file_manager.file_ops_log_path, "r", encoding="utf-8") as f: log_contents = f.read() digest = hashlib.md5()
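
Reviewer note: a minimal usage sketch of the file-management wiring this patch introduces, based only on the hunks above. It shows what `BaseAgent.attach_fs` and `WorkspaceMixin` set up: an `AgentFileManager` rooted at an agent data directory, with the workspace nested inside it. The directory path below is an arbitrary example (the app itself uses `config.app_data_dir / "agents" / <name>`); real `Agent`/`Config` construction and error handling are omitted.

    from pathlib import Path

    from autogpt.agents.utils.agent_file_manager import AgentFileManager
    from autogpt.workspace import Workspace

    # Example agent data directory (arbitrary path, for illustration only)
    agent_data_dir = Path("data/agents/AutoGPT-example")

    # What attach_fs does under the hood: create the file manager and its ops log
    file_manager = AgentFileManager(agent_data_dir)
    file_manager.initialize()  # creates the directory and file_logger.log if missing

    print(file_manager.root)               # .../data/agents/AutoGPT-example
    print(file_manager.file_ops_log_path)  # .../data/agents/AutoGPT-example/file_logger.log
    print(file_manager.state_file_path)    # .../data/agents/AutoGPT-example/state.json

    # WorkspaceMixin then nests the agent's workspace inside its data dir
    workspace = Workspace(file_manager.root / "workspace", restrict_to_workspace=True)
    workspace.initialize()
    print(workspace.root)                  # .../data/agents/AutoGPT-example/workspace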