Files
Auto-GPT/autogpts/autogpt/tests/conftest.py
Reinier van der Leer 1f40d72081 feat(agent/workspace): Add GCS and S3 FileWorkspace providers (#6485)
* refactor: Rename FileWorkspace to LocalFileWorkspace and create FileWorkspace abstract class
  - Rename `FileWorkspace` to `LocalFileWorkspace` to give a more descriptive name to the class that implements a file workspace on the local filesystem.
  - Create a new base class `FileWorkspace` to serve as the parent class for `LocalFileWorkspace`. This allows for easier extension and customization of file workspace backends in the future (see the sketch below).
  - Update import statements and references to `FileWorkspace` throughout the codebase to use the new naming conventions.
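
A minimal sketch of the intended split, assuming a simple read/write interface; the method names are illustrative and the real `FileWorkspace` interface may differ:

```python
from __future__ import annotations

from abc import ABC, abstractmethod
from pathlib import Path


class FileWorkspace(ABC):
    """Abstract interface implemented by all workspace backends."""

    @abstractmethod
    def read_file(self, path: str | Path) -> str:
        ...

    @abstractmethod
    def write_file(self, path: str | Path, content: str) -> None:
        ...


class LocalFileWorkspace(FileWorkspace):
    """FileWorkspace backed by a directory on the local filesystem."""

    def __init__(self, root: Path):
        self._root = root.resolve()

    def read_file(self, path: str | Path) -> str:
        return (self._root / path).read_text()

    def write_file(self, path: str | Path, content: str) -> None:
        full_path = self._root / path
        full_path.parent.mkdir(parents=True, exist_ok=True)
        full_path.write_text(content)
```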

* feat: Add S3FileWorkspace + tests + test setups for CI and Docker
  - Added the `S3FileWorkspace` class, which implements the file workspace interface on top of an S3 bucket (see the sketch after this list).
  - Updated pyproject.toml to include dependencies for boto3 and boto3-stubs.
  - Implemented unit tests for S3FileWorkspace.
  - Added MinIO service to Docker CI to allow testing S3 features in CI.
  - Added autogpt-test service config to docker-compose.yml for local testing with MinIO.
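
Roughly, the S3 backend maps workspace operations onto boto3 as below; the bucket name is an example and `S3FileWorkspace`'s real interface may differ. In CI, the same code can be pointed at the local MinIO container by overriding the endpoint URL:

```python
import boto3

# MinIO listens on port 9000 by default; in CI the endpoint URL points there.
s3 = boto3.resource("s3", endpoint_url="http://localhost:9000")
bucket = s3.Bucket("autogpt-test-workspace")  # example bucket name

# write_file(path, content) roughly corresponds to:
bucket.put_object(Key="agent-1/notes.txt", Body=b"hello")

# read_file(path) roughly corresponds to:
content = bucket.Object("agent-1/notes.txt").get()["Body"].read().decode()
```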

* ci(docker): tee test output instead of capturing

* fix: Improve error handling in S3FileWorkspace.initialize()
  - No longer tolerate every `botocore.exceptions.ClientError`
  - Re-raise the exception if the error is not "NoSuchBucket" (sketched below)
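
The gist of this behavior as a standalone sketch; `ensure_bucket` is a hypothetical helper, not the actual method body of `S3FileWorkspace.initialize()`:

```python
import botocore.exceptions


def ensure_bucket(s3, bucket_name: str):
    """Sketch of the error handling described above."""
    try:
        s3.meta.client.head_bucket(Bucket=bucket_name)
        return s3.Bucket(bucket_name)
    except botocore.exceptions.ClientError as e:
        # Tolerate only "bucket does not exist"; re-raise everything else.
        if e.response["Error"]["Code"] not in ("404", "NoSuchBucket"):
            raise
        return s3.create_bucket(Bucket=bucket_name)
```

Here `s3` would be a `boto3.resource("s3")` instance and `bucket_name` the configured workspace bucket.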

* feat: Add S3 workspace backend support and S3Credentials
  - Added support for an S3 workspace backend in the AutoGPT configuration
  - Added a new sub-config `S3Credentials` to store S3 credentials
  - Modified the `.env.template` file to include variables related to S3 credentials
  - Added a new `s3_credentials` attribute on the `Config` class to store S3 credentials
  - Moved the `unmasked` method from `ModelProviderCredentials` to the parent `ProviderCredentials` class to handle unmasking for S3 credentials

* fix(agent/tests): Fix S3FileWorkspace initialization in test_s3_file_workspace.py
  - Update the `S3FileWorkspace` initialization in test_s3_file_workspace.py to include the required S3 credentials.

* refactor: Remove S3Credentials and add get_workspace function
  - Remove `S3Credentials`, as boto3 fetches its credentials from the environment by itself (see the example after this list)
  - Add `get_workspace` function in `autogpt.file_workspace` module
  - Update `.env.template` and tests to reflect the changes
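
For context, boto3's default credential chain resolves credentials on its own, which is what makes a dedicated sub-config redundant:

```python
import boto3

# boto3's default credential chain reads the standard environment variables,
# e.g. AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY and AWS_DEFAULT_REGION,
# so no AutoGPT-specific S3Credentials sub-config is needed.
s3 = boto3.resource("s3")
```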

* feat(agent/workspace): Make agent workspace backend configurable
  - Modified the `autogpt.file_workspace.get_workspace` function to take either a workspace `id` or a `root_path` (rough usage sketch below).
  - Modified `FileWorkspaceMixin` to use the `get_workspace` function to set up the workspace.
  - Updated the type hints and imports accordingly.
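
A rough usage sketch: only the `id`/`root_path` keywords are taken from the notes above; how the backend is selected (explicit argument vs. configuration) is not shown here, and the exact signature may differ:

```python
from pathlib import Path

from autogpt.file_workspace import get_workspace

# Resolve a workspace by agent/task id (root derived by the configured backend) ...
workspace = get_workspace(id="task-123")  # keyword per the notes above

# ... or point it at an explicit root path:
workspace = get_workspace(root_path=Path("data/agents/AutoGPT/workspace"))
workspace.initialize()
```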

* feat(agent/workspace): Add GCSFileWorkspace for Google Cloud Storage
  - Added support for Google Cloud Storage as a storage backend option in the workspace.
  - Created the `GCSFileWorkspace` class, which implements the file workspace interface on top of a Google Cloud Storage bucket (see the sketch after this list).
  - Implemented the `GCSFileWorkspaceConfiguration` class to handle the configuration for Google Cloud Storage workspaces.
  - Updated the `get_workspace` function to include the option to use Google Cloud Storage as a workspace backend.
  - Added unit tests for the new `GCSFileWorkspace` class.
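
Roughly what `GCSFileWorkspace` wraps, in terms of the `google-cloud-storage` client; the bucket name and object keys are examples, and credentials come from the usual Application Default Credentials mechanism:

```python
from google.cloud import storage

gcs = storage.Client()                    # uses GOOGLE_APPLICATION_CREDENTIALS / ADC
bucket = gcs.bucket("autogpt-workspace")  # example bucket name

# write_file(path, content) roughly corresponds to:
bucket.blob("agent-1/notes.txt").upload_from_string("hello")

# read_file(path) roughly corresponds to:
content = bucket.blob("agent-1/notes.txt").download_as_text()
```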

* fix: Unbreak use of non-local workspaces in AgentProtocolServer
  - Modify the `_get_task_agent_file_workspace` method to handle both local and non-local workspaces correctly
2023-12-07 14:46:08 +01:00

160 lines
4.4 KiB
Python

import os
import uuid
from pathlib import Path
from tempfile import TemporaryDirectory

import pytest
import yaml
from pytest_mock import MockerFixture

from autogpt.agents.agent import Agent, AgentConfiguration, AgentSettings
from autogpt.app.main import _configure_openai_provider
from autogpt.config import AIProfile, Config, ConfigBuilder
from autogpt.core.resource.model_providers import ChatModelProvider, OpenAIProvider
from autogpt.file_workspace.local import (
    FileWorkspace,
    FileWorkspaceConfiguration,
    LocalFileWorkspace,
)
from autogpt.llm.api_manager import ApiManager
from autogpt.logs.config import configure_logging
from autogpt.models.command_registry import CommandRegistry

pytest_plugins = [
    "tests.integration.agent_factory",
    "tests.integration.memory.utils",
    "tests.vcr",
]


@pytest.fixture()
def tmp_project_root(tmp_path: Path) -> Path:
    return tmp_path


@pytest.fixture()
def app_data_dir(tmp_project_root: Path) -> Path:
    dir = tmp_project_root / "data"
    dir.mkdir(parents=True, exist_ok=True)
    return dir


@pytest.fixture()
def agent_data_dir(app_data_dir: Path) -> Path:
    return app_data_dir / "agents/AutoGPT"


@pytest.fixture()
def workspace_root(agent_data_dir: Path) -> Path:
    return agent_data_dir / "workspace"


@pytest.fixture()
def workspace(workspace_root: Path) -> FileWorkspace:
    workspace = LocalFileWorkspace(FileWorkspaceConfiguration(root=workspace_root))
    workspace.initialize()
    return workspace


@pytest.fixture
def temp_plugins_config_file():
    """
    Create a plugins_config.yaml file in a temp directory
    so that it doesn't mess with existing ones.
    """
    config_directory = TemporaryDirectory()
    config_file = Path(config_directory.name) / "plugins_config.yaml"
    with open(config_file, "w+") as f:
        f.write(yaml.dump({}))

    yield config_file


@pytest.fixture()
def config(
    temp_plugins_config_file: Path,
    tmp_project_root: Path,
    app_data_dir: Path,
    mocker: MockerFixture,
):
    if not os.environ.get("OPENAI_API_KEY"):
        os.environ["OPENAI_API_KEY"] = "sk-dummy"
    config = ConfigBuilder.build_config_from_env(project_root=tmp_project_root)

    config.app_data_dir = app_data_dir

    config.plugins_dir = "tests/unit/data/test_plugins"
    config.plugins_config_file = temp_plugins_config_file

    config.logging.log_dir = Path(__file__).parent / "logs"
    config.logging.plain_console_output = True
    config.noninteractive_mode = True

    # avoid circular dependency
    from autogpt.plugins.plugins_config import PluginsConfig

    config.plugins_config = PluginsConfig.load_config(
        plugins_config_file=config.plugins_config_file,
        plugins_denylist=config.plugins_denylist,
        plugins_allowlist=config.plugins_allowlist,
    )
    yield config


@pytest.fixture(scope="session")
def setup_logger(config: Config):
    configure_logging(**config.logging.dict())


@pytest.fixture()
def api_manager() -> ApiManager:
    if ApiManager in ApiManager._instances:
        del ApiManager._instances[ApiManager]
    return ApiManager()


@pytest.fixture
def llm_provider(config: Config) -> OpenAIProvider:
    return _configure_openai_provider(config)


@pytest.fixture
def agent(
    agent_data_dir: Path, config: Config, llm_provider: ChatModelProvider
) -> Agent:
    ai_profile = AIProfile(
        ai_name="Base",
        ai_role="A base AI",
        ai_goals=[],
    )

    command_registry = CommandRegistry()

    agent_prompt_config = Agent.default_settings.prompt_config.copy(deep=True)
    agent_prompt_config.use_functions_api = config.openai_functions

    agent_settings = AgentSettings(
        name=Agent.default_settings.name,
        description=Agent.default_settings.description,
        agent_id=f"AutoGPT-test-agent-{str(uuid.uuid4())[:8]}",
        ai_profile=ai_profile,
        config=AgentConfiguration(
            fast_llm=config.fast_llm,
            smart_llm=config.smart_llm,
            allow_fs_access=not config.restrict_to_workspace,
            use_functions_api=config.openai_functions,
            plugins=config.plugins,
        ),
        prompt_config=agent_prompt_config,
        history=Agent.default_settings.history.copy(deep=True),
    )

    agent = Agent(
        settings=agent_settings,
        llm_provider=llm_provider,
        command_registry=command_registry,
        legacy_config=config,
    )
    agent.attach_fs(agent_data_dir)
    return agent