diff --git a/.gitignore b/.gitignore
index 9695cf4a..195ecb71 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,7 @@
 ## Original ignores
 autogpt/keys.py
 autogpt/*.json
-**/auto_gpt_workspace/*
+auto_gpt_workspace/*
 *.mpeg
 .env
 azure.yaml
diff --git a/autogpt/cli.py b/autogpt/cli.py
index 690c1626..6deb00bf 100644
--- a/autogpt/cli.py
+++ b/autogpt/cli.py
@@ -1,4 +1,5 @@
 """Main script for the autogpt package."""
+from pathlib import Path
 from typing import Optional
 
 import click
@@ -115,24 +116,25 @@ def main(
 
     if ctx.invoked_subcommand is None:
         run_auto_gpt(
-            continuous,
-            continuous_limit,
-            ai_settings,
-            prompt_settings,
-            skip_reprompt,
-            speak,
-            debug,
-            gpt3only,
-            gpt4only,
-            memory_type,
-            browser_name,
-            allow_downloads,
-            skip_news,
-            workspace_directory,
-            install_plugin_deps,
-            ai_name,
-            ai_role,
-            ai_goal,
+            continuous=continuous,
+            continuous_limit=continuous_limit,
+            ai_settings=ai_settings,
+            prompt_settings=prompt_settings,
+            skip_reprompt=skip_reprompt,
+            speak=speak,
+            debug=debug,
+            gpt3only=gpt3only,
+            gpt4only=gpt4only,
+            memory_type=memory_type,
+            browser_name=browser_name,
+            allow_downloads=allow_downloads,
+            skip_news=skip_news,
+            working_directory=Path(__file__).parent.parent,  # TODO: make this an option
+            workspace_directory=workspace_directory,
+            install_plugin_deps=install_plugin_deps,
+            ai_name=ai_name,
+            ai_role=ai_role,
+            ai_goals=ai_goal,
         )
 
 
diff --git a/autogpt/commands/execute_code.py b/autogpt/commands/execute_code.py
index fb4cb70e..dd35f859 100644
--- a/autogpt/commands/execute_code.py
+++ b/autogpt/commands/execute_code.py
@@ -150,7 +150,7 @@ def execute_python_file(filename: str, agent: Agent) -> str:
                 file_path.relative_to(agent.workspace.root).as_posix(),
             ],
             volumes={
-                agent.config.workspace_path: {
+                str(agent.config.workspace_path): {
                     "bind": "/workspace",
                     "mode": "rw",
                 }
diff --git a/autogpt/commands/image_gen.py b/autogpt/commands/image_gen.py
index abae6149..e02400a8 100644
--- a/autogpt/commands/image_gen.py
+++ b/autogpt/commands/image_gen.py
@@ -37,7 +37,7 @@ def generate_image(prompt: str, agent: Agent, size: int = 256) -> str:
     Returns:
         str: The filename of the image
     """
-    filename = f"{agent.config.workspace_path}/{str(uuid.uuid4())}.jpg"
+    filename = agent.config.workspace_path / f"{str(uuid.uuid4())}.jpg"
 
     # DALL-E
     if agent.config.image_provider == "dalle":
diff --git a/autogpt/config/ai_config.py b/autogpt/config/ai_config.py
index a2952c9d..b47740f6 100644
--- a/autogpt/config/ai_config.py
+++ b/autogpt/config/ai_config.py
@@ -4,7 +4,6 @@ A module that contains the AIConfig class object that contains the configuration
 """
 from __future__ import annotations
 
-import os
 import platform
 from pathlib import Path
 from typing import TYPE_CHECKING, Optional
@@ -16,9 +15,6 @@ if TYPE_CHECKING:
     from autogpt.models.command_registry import CommandRegistry
     from autogpt.prompts.generator import PromptGenerator
 
-# Soon this will go in a folder where it remembers more stuff about the run(s)
-SAVE_FILE = str(Path(os.getcwd()) / "ai_settings.yaml")
-
 
 class AIConfig:
     """
@@ -57,14 +53,13 @@ class AIConfig:
         self.command_registry: CommandRegistry | None = None
 
     @staticmethod
-    def load(ai_settings_file: str = SAVE_FILE) -> "AIConfig":
+    def load(ai_settings_file: str | Path) -> "AIConfig":
         """
         Returns class object with parameters (ai_name, ai_role, ai_goals, api_budget)
         loaded from yaml file if yaml file exists, else returns class with no parameters.
 
         Parameters:
-            ai_settings_file (int): The path to the config yaml file.
-              DEFAULT: "../ai_settings.yaml"
+            ai_settings_file (Path): The path to the config yaml file.
 
         Returns:
             cls (object): An instance of given cls object
@@ -85,16 +80,15 @@ class AIConfig:
             for goal in config_params.get("ai_goals", [])
         ]
         api_budget = config_params.get("api_budget", 0.0)
-        # type: Type[AIConfig]
+
         return AIConfig(ai_name, ai_role, ai_goals, api_budget)
 
-    def save(self, ai_settings_file: str = SAVE_FILE) -> None:
+    def save(self, ai_settings_file: str | Path) -> None:
         """
         Saves the class parameters to the specified file yaml file path as a yaml file.
 
         Parameters:
-            ai_settings_file(str): The path to the config yaml file.
-              DEFAULT: "../ai_settings.yaml"
+            ai_settings_file (Path): The path to the config yaml file.
 
         Returns:
             None
diff --git a/autogpt/config/config.py b/autogpt/config/config.py
index 02cdbebe..5b371f5e 100644
--- a/autogpt/config/config.py
+++ b/autogpt/config/config.py
@@ -4,6 +4,7 @@ from __future__ import annotations
 import contextlib
 import os
 import re
+from pathlib import Path
 from typing import Any, Dict, Optional, Union
 
 import yaml
@@ -14,10 +15,8 @@ from pydantic import Field, validator
 from autogpt.core.configuration.schema import Configurable, SystemSettings
 from autogpt.plugins.plugins_config import PluginsConfig
 
-AZURE_CONFIG_FILE = os.path.join(os.path.dirname(__file__), "../..", "azure.yaml")
-PLUGINS_CONFIG_FILE = os.path.join(
-    os.path.dirname(__file__), "../..", "plugins_config.yaml"
-)
+AZURE_CONFIG_FILE = "azure.yaml"
+PLUGINS_CONFIG_FILE = "plugins_config.yaml"
 GPT_4_MODEL = "gpt-4"
 GPT_3_MODEL = "gpt-3.5-turbo"
 
@@ -47,7 +46,8 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     # Paths
     ai_settings_file: str = "ai_settings.yaml"
     prompt_settings_file: str = "prompt_settings.yaml"
-    workspace_path: Optional[str] = None
+    workdir: Path = None
+    workspace_path: Optional[Path] = None
     file_logger_path: Optional[str] = None
     # Model configuration
     fast_llm: str = "gpt-3.5-turbo"
@@ -210,9 +210,10 @@ class ConfigBuilder(Configurable[Config]):
     default_settings = Config()
 
     @classmethod
-    def build_config_from_env(cls) -> Config:
+    def build_config_from_env(cls, workdir: Path) -> Config:
         """Initialize the Config class"""
         config_dict = {
+            "workdir": workdir,
             "authorise_key": os.getenv("AUTHORISE_COMMAND_KEY"),
             "exit_key": os.getenv("EXIT_KEY"),
             "plain_output": os.getenv("PLAIN_OUTPUT", "False") == "True",
@@ -299,7 +300,9 @@ class ConfigBuilder(Configurable[Config]):
             config_dict["temperature"] = float(os.getenv("TEMPERATURE"))
 
         if config_dict["use_azure"]:
-            azure_config = cls.load_azure_config(config_dict["azure_config_file"])
+            azure_config = cls.load_azure_config(
+                workdir / config_dict["azure_config_file"]
+            )
             config_dict.update(azure_config)
 
         elif os.getenv("OPENAI_API_BASE_URL"):
@@ -318,7 +321,7 @@ class ConfigBuilder(Configurable[Config]):
 
         # Set secondary config variables (that depend on other config variables)
         config.plugins_config = PluginsConfig.load_config(
-            config.plugins_config_file,
+            config.workdir / config.plugins_config_file,
             config.plugins_denylist,
             config.plugins_allowlist,
         )
@@ -326,13 +329,13 @@ class ConfigBuilder(Configurable[Config]):
         return config
 
     @classmethod
-    def load_azure_config(cls, config_file: str = AZURE_CONFIG_FILE) -> Dict[str, str]:
+    def load_azure_config(cls, config_file: Path) -> Dict[str, str]:
         """
         Loads the configuration parameters for Azure hosting from the specified file
           path as a yaml file.
 
         Parameters:
-            config_file(str): The path to the config yaml file. DEFAULT: "../azure.yaml"
+            config_file (Path): The path to the config yaml file.
 
         Returns:
             Dict
diff --git a/autogpt/logs/handlers.py b/autogpt/logs/handlers.py
index c60b0575..1b9037d6 100644
--- a/autogpt/logs/handlers.py
+++ b/autogpt/logs/handlers.py
@@ -2,6 +2,7 @@ import json
 import logging
 import random
 import time
+from pathlib import Path
 
 
 class ConsoleHandler(logging.StreamHandler):
@@ -38,7 +39,7 @@ class TypingConsoleHandler(logging.StreamHandler):
 
 
 class JsonFileHandler(logging.FileHandler):
-    def __init__(self, filename: str, mode="a", encoding=None, delay=False):
+    def __init__(self, filename: str | Path, mode="a", encoding=None, delay=False):
         super().__init__(filename, mode, encoding, delay)
 
     def emit(self, record: logging.LogRecord):
diff --git a/autogpt/logs/log_cycle.py b/autogpt/logs/log_cycle.py
index f3cbf166..db8239f6 100644
--- a/autogpt/logs/log_cycle.py
+++ b/autogpt/logs/log_cycle.py
@@ -1,5 +1,6 @@
 import json
 import os
+from pathlib import Path
 from typing import Any, Dict, Union
 
 from .logger import logger
@@ -23,38 +24,33 @@ class LogCycleHandler:
     def __init__(self):
         self.log_count_within_cycle = 0
 
-    @staticmethod
-    def create_directory_if_not_exists(directory_path: str) -> None:
-        if not os.path.exists(directory_path):
-            os.makedirs(directory_path, exist_ok=True)
-
-    def create_outer_directory(self, ai_name: str, created_at: str) -> str:
-        log_directory = logger.get_log_directory()
-
+    def create_outer_directory(self, ai_name: str, created_at: str) -> Path:
         if os.environ.get("OVERWRITE_DEBUG") == "1":
             outer_folder_name = "auto_gpt"
         else:
             ai_name_short = self.get_agent_short_name(ai_name)
             outer_folder_name = f"{created_at}_{ai_name_short}"
 
-        outer_folder_path = os.path.join(log_directory, "DEBUG", outer_folder_name)
-        self.create_directory_if_not_exists(outer_folder_path)
+        outer_folder_path = logger.log_dir / "DEBUG" / outer_folder_name
+        if not outer_folder_path.exists():
+            outer_folder_path.mkdir(parents=True)
 
         return outer_folder_path
 
     def get_agent_short_name(self, ai_name: str) -> str:
         return ai_name[:15].rstrip() if ai_name else DEFAULT_PREFIX
 
-    def create_inner_directory(self, outer_folder_path: str, cycle_count: int) -> str:
+    def create_inner_directory(self, outer_folder_path: Path, cycle_count: int) -> Path:
         nested_folder_name = str(cycle_count).zfill(3)
-        nested_folder_path = os.path.join(outer_folder_path, nested_folder_name)
-        self.create_directory_if_not_exists(nested_folder_path)
+        nested_folder_path = outer_folder_path / nested_folder_name
+        if not nested_folder_path.exists():
+            nested_folder_path.mkdir()
 
         return nested_folder_path
 
     def create_nested_directory(
         self, ai_name: str, created_at: str, cycle_count: int
-    ) -> str:
+    ) -> Path:
         outer_folder_path = self.create_outer_directory(ai_name, created_at)
         nested_folder_path = self.create_inner_directory(outer_folder_path, cycle_count)
 
@@ -75,14 +71,10 @@ class LogCycleHandler:
             data (Any): The data to be logged.
             file_name (str): The name of the file to save the logged data.
""" - nested_folder_path = self.create_nested_directory( - ai_name, created_at, cycle_count - ) + cycle_log_dir = self.create_nested_directory(ai_name, created_at, cycle_count) json_data = json.dumps(data, ensure_ascii=False, indent=4) - log_file_path = os.path.join( - nested_folder_path, f"{self.log_count_within_cycle}_{file_name}" - ) + log_file_path = cycle_log_dir / f"{self.log_count_within_cycle}_{file_name}" logger.log_json(json_data, log_file_path) self.log_count_within_cycle += 1 diff --git a/autogpt/logs/logger.py b/autogpt/logs/logger.py index e4cedc36..5bb94792 100644 --- a/autogpt/logs/logger.py +++ b/autogpt/logs/logger.py @@ -2,7 +2,7 @@ from __future__ import annotations import logging -import os +from pathlib import Path from typing import TYPE_CHECKING, Any, Optional from colorama import Fore @@ -25,10 +25,10 @@ class Logger(metaclass=Singleton): def __init__(self): # create log directory if it doesn't exist - this_files_dir_path = os.path.dirname(__file__) - log_dir = os.path.join(this_files_dir_path, "../logs") - if not os.path.exists(log_dir): - os.makedirs(log_dir) + # TODO: use workdir from config + self.log_dir = Path(__file__).parent.parent.parent / "logs" + if not self.log_dir.exists(): + self.log_dir.mkdir() log_file = "activity.log" error_file = "error.log" @@ -46,9 +46,7 @@ class Logger(metaclass=Singleton): self.console_handler.setFormatter(console_formatter) # Info handler in activity.log - self.file_handler = logging.FileHandler( - os.path.join(log_dir, log_file), "a", "utf-8" - ) + self.file_handler = logging.FileHandler(self.log_dir / log_file, "a", "utf-8") self.file_handler.setLevel(logging.DEBUG) info_formatter = AutoGptFormatter( "%(asctime)s %(levelname)s %(title)s %(message_no_color)s" @@ -56,9 +54,7 @@ class Logger(metaclass=Singleton): self.file_handler.setFormatter(info_formatter) # Error handler error.log - error_handler = logging.FileHandler( - os.path.join(log_dir, error_file), "a", "utf-8" - ) + error_handler = logging.FileHandler(self.log_dir / error_file, "a", "utf-8") error_handler.setLevel(logging.ERROR) error_formatter = AutoGptFormatter( "%(asctime)s %(levelname)s %(module)s:%(funcName)s:%(lineno)d %(title)s" @@ -179,13 +175,9 @@ class Logger(metaclass=Singleton): self.typewriter_log("DOUBLE CHECK CONFIGURATION", Fore.YELLOW, additionalText) - def log_json(self, data: Any, file_name: str) -> None: - # Define log directory - this_files_dir_path = os.path.dirname(__file__) - log_dir = os.path.join(this_files_dir_path, "../logs") - + def log_json(self, data: Any, file_name: str | Path) -> None: # Create a handler for JSON files - json_file_path = os.path.join(log_dir, file_name) + json_file_path = self.log_dir / file_name json_data_handler = JsonFileHandler(json_file_path) json_data_handler.setFormatter(JsonFormatter()) @@ -194,10 +186,5 @@ class Logger(metaclass=Singleton): self.json_logger.debug(data) self.json_logger.removeHandler(json_data_handler) - def get_log_directory(self) -> str: - this_files_dir_path = os.path.dirname(__file__) - log_dir = os.path.join(this_files_dir_path, "../../logs") - return os.path.abspath(log_dir) - logger = Logger() diff --git a/autogpt/main.py b/autogpt/main.py index f388a1e9..ced13511 100644 --- a/autogpt/main.py +++ b/autogpt/main.py @@ -53,6 +53,7 @@ def run_auto_gpt( browser_name: str, allow_downloads: bool, skip_news: bool, + working_directory: Path, workspace_directory: str | Path, install_plugin_deps: bool, ai_name: Optional[str] = None, @@ -62,7 +63,8 @@ def run_auto_gpt( # Configure logging before 
     # Configure logging before we do anything else.
     logger.set_level(logging.DEBUG if debug else logging.INFO)
 
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workdir=working_directory)
+
     # HACK: This is a hack to allow the config into the logger without having to pass it around everywhere
     # or import it directly.
     logger.config = config
@@ -129,10 +131,10 @@ def run_auto_gpt(
     # TODO: have this directory live outside the repository (e.g. in a user's
     # home directory) and have it come in as a command line argument or part of
     # the env file.
-    workspace_directory = Workspace.get_workspace_directory(config, workspace_directory)
+    Workspace.set_workspace_directory(config, workspace_directory)
 
     # HACK: doing this here to collect some globals that depend on the workspace.
-    Workspace.build_file_logger_path(config, workspace_directory)
+    Workspace.build_file_logger_path(config, config.workspace_path)
 
     config.plugins = scan_plugins(config, config.debug_mode)
     # Create a CommandRegistry instance and scan default folder
diff --git a/autogpt/memory/vector/providers/json_file.py b/autogpt/memory/vector/providers/json_file.py
index b85ea8e6..79ff09f7 100644
--- a/autogpt/memory/vector/providers/json_file.py
+++ b/autogpt/memory/vector/providers/json_file.py
@@ -29,8 +29,7 @@ class JSONFileMemory(VectorMemoryProvider):
         Returns:
             None
         """
-        workspace_path = Path(config.workspace_path)
-        self.file_path = workspace_path / f"{config.memory_index}.json"
+        self.file_path = config.workspace_path / f"{config.memory_index}.json"
         self.file_path.touch()
         logger.debug(
             f"Initialized {__class__.__name__} with index path {self.file_path}"
diff --git a/autogpt/plugins/__init__.py b/autogpt/plugins/__init__.py
index e9b864c6..69af98a6 100644
--- a/autogpt/plugins/__init__.py
+++ b/autogpt/plugins/__init__.py
@@ -23,10 +23,6 @@ if TYPE_CHECKING:
 from autogpt.logs import logger
 from autogpt.models.base_open_ai_plugin import BaseOpenAIPlugin
 
-DEFAULT_PLUGINS_CONFIG_FILE = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)), "..", "..", "plugins_config.yaml"
-)
-
 
 def inspect_zip_for_modules(zip_path: str, debug: bool = False) -> list[str]:
     """
diff --git a/autogpt/plugins/plugins_config.py b/autogpt/plugins/plugins_config.py
index 13b87130..dc313106 100644
--- a/autogpt/plugins/plugins_config.py
+++ b/autogpt/plugins/plugins_config.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-import os
+from pathlib import Path
 from typing import Union
 
 import yaml
@@ -28,7 +28,7 @@ class PluginsConfig(BaseModel):
     @classmethod
     def load_config(
         cls,
-        plugins_config_file: str,
+        plugins_config_file: Path,
         plugins_denylist: list[str],
         plugins_allowlist: list[str],
     ) -> "PluginsConfig":
@@ -56,11 +56,11 @@ class PluginsConfig(BaseModel):
     @classmethod
     def deserialize_config_file(
         cls,
-        plugins_config_file: str,
+        plugins_config_file: Path,
         plugins_denylist: list[str],
         plugins_allowlist: list[str],
     ) -> dict[str, PluginConfig]:
-        if not os.path.exists(plugins_config_file):
+        if not plugins_config_file.is_file():
             logger.warn("plugins_config.yaml does not exist, creating base config.")
             cls.create_empty_plugins_config(
                 plugins_config_file,
@@ -87,7 +87,7 @@ class PluginsConfig(BaseModel):
 
     @staticmethod
     def create_empty_plugins_config(
-        plugins_config_file: str,
+        plugins_config_file: Path,
         plugins_denylist: list[str],
         plugins_allowlist: list[str],
     ):
diff --git a/autogpt/prompts/prompt.py b/autogpt/prompts/prompt.py
index b5a0ec88..d275abc2 100644
--- a/autogpt/prompts/prompt.py
+++ b/autogpt/prompts/prompt.py
@@ -55,7 +55,7 @@ def construct_main_ai_config(
     Returns:
         str: The prompt string
     """
-    ai_config = AIConfig.load(config.ai_settings_file)
+    ai_config = AIConfig.load(config.workdir / config.ai_settings_file)
 
     # Apply overrides
     if name:
@@ -99,7 +99,7 @@ Continue ({config.authorise_key}/{config.exit_key}): """,
 
     if any([not ai_config.ai_name, not ai_config.ai_role, not ai_config.ai_goals]):
         ai_config = prompt_user(config)
-        ai_config.save(config.ai_settings_file)
+        ai_config.save(config.workdir / config.ai_settings_file)
 
     if config.restrict_to_workspace:
         logger.typewriter_log(
diff --git a/autogpt/workspace/workspace.py b/autogpt/workspace/workspace.py
index d4bc7f65..e580d4c4 100644
--- a/autogpt/workspace/workspace.py
+++ b/autogpt/workspace/workspace.py
@@ -152,15 +152,13 @@ class Workspace:
         config.file_logger_path = str(file_logger_path)
 
     @staticmethod
-    def get_workspace_directory(
+    def set_workspace_directory(
         config: Config, workspace_directory: Optional[str | Path] = None
-    ):
+    ) -> None:
         if workspace_directory is None:
-            workspace_directory = Path(__file__).parent / "auto_gpt_workspace"
+            workspace_directory = config.workdir / "auto_gpt_workspace"
         elif type(workspace_directory) == str:
             workspace_directory = Path(workspace_directory)
         # TODO: pass in the ai_settings file and the env file and have them cloned into
         # the workspace directory so we can bind them to the agent.
-        workspace_directory = Workspace.make_workspace(workspace_directory)
-        config.workspace_path = str(workspace_directory)
-        return workspace_directory
+        config.workspace_path = Workspace.make_workspace(workspace_directory)
diff --git a/benchmarks.py b/benchmarks.py
index e6482d0d..2b4e5fec 100644
--- a/benchmarks.py
+++ b/benchmarks.py
@@ -1,3 +1,5 @@
+from pathlib import Path
+
 from autogpt.agents import Agent
 from autogpt.config import AIConfig, Config, ConfigBuilder
 from autogpt.main import COMMAND_CATEGORIES, run_interaction_loop
@@ -6,6 +8,8 @@ from autogpt.models.command_registry import CommandRegistry
 from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT
 from autogpt.workspace import Workspace
 
+PROJECT_DIR = Path().resolve()
+
 
 def run_task(task) -> None:
     agent = bootstrap_agent(task)
@@ -13,15 +17,14 @@ def run_task(task) -> None:
 
 
 def bootstrap_agent(task):
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workdir=PROJECT_DIR)
     config.continuous_mode = False
     config.temperature = 0
     config.plain_output = True
     command_registry = get_command_registry(config)
     config.memory_backend = "no_memory"
-    workspace_directory = Workspace.get_workspace_directory(config)
-    workspace_directory_path = Workspace.make_workspace(workspace_directory)
-    Workspace.build_file_logger_path(config, workspace_directory_path)
+    Workspace.set_workspace_directory(config)
+    Workspace.build_file_logger_path(config, config.workspace_path)
     ai_config = AIConfig(
         ai_name="Auto-GPT",
         ai_role="a multi-purpose AI assistant.",
@@ -34,7 +37,7 @@ def bootstrap_agent(task):
         ai_config=ai_config,
         config=config,
         triggering_prompt=DEFAULT_TRIGGERING_PROMPT,
-        workspace_directory=str(workspace_directory_path),
+        workspace_directory=str(config.workspace_path),
     )
 
 
diff --git a/docs/configuration/memory.md b/docs/configuration/memory.md
index 452a6eac..56d06b46 100644
--- a/docs/configuration/memory.md
+++ b/docs/configuration/memory.md
@@ -173,7 +173,7 @@ options:
 # python data_ingestion.py --dir DataFolder --init --overlap 100 --max_length 2000
 ```
 
-In the example above, the script initializes the memory, ingests all files within the `Auto-Gpt/autogpt/auto_gpt_workspace/DataFolder` directory into memory with an overlap between chunks of 100 and a maximum length of each chunk of 2000.
+In the example above, the script initializes the memory, ingests all files within the `Auto-Gpt/auto_gpt_workspace/DataFolder` directory into memory with an overlap between chunks of 100 and a maximum length of each chunk of 2000.
 
 Note that you can also use the `--file` argument to ingest a single file into memory and that data_ingestion.py will only ingest files within the `/auto_gpt_workspace` directory.
 
diff --git a/docs/setup.md b/docs/setup.md
index ba2d6a5f..d0079e0f 100644
--- a/docs/setup.md
+++ b/docs/setup.md
@@ -51,7 +51,7 @@ Get your OpenAI API key from: [https://platform.openai.com/account/api-keys](htt
       - .env
     profiles: ["exclude-from-up"]
     volumes:
-      - ./auto_gpt_workspace:/app/autogpt/auto_gpt_workspace
+      - ./auto_gpt_workspace:/app/auto_gpt_workspace
       - ./data:/app/data
       ## allow auto-gpt to write logs to disk
       - ./logs:/app/logs
diff --git a/tests/conftest.py b/tests/conftest.py
index 854eb72a..2becc8bf 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -48,7 +48,7 @@ def temp_plugins_config_file():
 def config(
     temp_plugins_config_file: str, mocker: MockerFixture, workspace: Workspace
 ) -> Config:
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workspace.root.parent)
 
     if not os.environ.get("OPENAI_API_KEY"):
         os.environ["OPENAI_API_KEY"] = "sk-dummy"
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py
index 7abbfcd5..066ca03d 100644
--- a/tests/unit/test_config.py
+++ b/tests/unit/test_config.py
@@ -161,7 +161,7 @@ azure_model_map:
 
     os.environ["USE_AZURE"] = "True"
     os.environ["AZURE_CONFIG_FILE"] = str(config_file)
-    config = ConfigBuilder.build_config_from_env()
+    config = ConfigBuilder.build_config_from_env(workspace.root.parent)
 
     assert config.openai_api_type == "azure"
     assert config.openai_api_base == "https://dummy.openai.azure.com"
diff --git a/tests/unit/test_plugins.py b/tests/unit/test_plugins.py
index 981715ac..7dc79e27 100644
--- a/tests/unit/test_plugins.py
+++ b/tests/unit/test_plugins.py
@@ -71,7 +71,7 @@ def test_create_base_config(config: Config):
     os.remove(config.plugins_config_file)
 
     plugins_config = PluginsConfig.load_config(
-        plugins_config_file=config.plugins_config_file,
+        plugins_config_file=config.workdir / config.plugins_config_file,
         plugins_denylist=config.plugins_denylist,
         plugins_allowlist=config.plugins_allowlist,
     )
@@ -107,7 +107,7 @@ def test_load_config(config: Config):
 
     # Load the config from disk
     plugins_config = PluginsConfig.load_config(
-        plugins_config_file=config.plugins_config_file,
+        plugins_config_file=config.workdir / config.plugins_config_file,
         plugins_denylist=config.plugins_denylist,
         plugins_allowlist=config.plugins_allowlist,
     )
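
Taken together, the diff routes every file location through an explicit working directory: `ConfigBuilder.build_config_from_env()` now takes a `workdir`, and `Workspace.set_workspace_directory()` derives `config.workspace_path` from it as a `Path`. A minimal usage sketch of the new call sequence, mirroring the updated `benchmarks.py` (using the current working directory as the project root is an assumption of the example, not something the diff enforces):

```python
from pathlib import Path

from autogpt.config import ConfigBuilder
from autogpt.workspace import Workspace

# Project root for this run; benchmarks.py resolves the current working directory.
PROJECT_DIR = Path().resolve()

# azure.yaml, plugins_config.yaml and ai_settings.yaml are now resolved
# against config.workdir instead of paths computed relative to the modules.
config = ConfigBuilder.build_config_from_env(workdir=PROJECT_DIR)

# With no explicit argument this falls back to <workdir>/auto_gpt_workspace
# and stores the created directory on config.workspace_path as a Path.
Workspace.set_workspace_directory(config)
Workspace.build_file_logger_path(config, config.workspace_path)
```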