refactor(agent): Remove redundant parsing functions in config classes
Pydantic has built-in type-casting logic, so conversion to Python built-in types and enums happens automatically.
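
To illustrate that claim with plain Pydantic (this is a minimal sketch, not AutoGPT code; the FileStorageBackendName enum below is a stand-in): raw strings, exactly as they come out of os.environ, are cast to the declared field types, so explicit int()/float()/Path()/Enum() wrappers around os.getenv() are redundant.

# Minimal sketch, plain Pydantic only; the enum is illustrative, not the
# repo's real FileStorageBackendName.
from enum import Enum
from pathlib import Path

from pydantic import BaseModel


class FileStorageBackendName(str, Enum):
    LOCAL = "local"
    S3 = "s3"


class Demo(BaseModel):
    backend: FileStorageBackendName
    settings_file: Path
    port: int
    temperature: float


# Every value arrives as a string, as it would from the environment:
demo = Demo(
    backend="s3",
    settings_file="ai_settings.yaml",
    port="6379",
    temperature="0",
)
assert demo.backend is FileStorageBackendName.S3
assert demo.settings_file == Path("ai_settings.yaml")
assert demo.port == 6379 and demo.temperature == 0.0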
@@ -63,10 +63,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):

     # File storage
     file_storage_backend: FileStorageBackendName = UserConfigurable(
-        default=FileStorageBackendName.LOCAL,
-        from_env=lambda: FileStorageBackendName(v)
-        if (v := os.getenv("FILE_STORAGE_BACKEND"))
-        else None,
+        default=FileStorageBackendName.LOCAL, from_env="FILE_STORAGE_BACKEND"
     )

     ##########################
@@ -74,27 +71,23 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     ##########################
     # Paths
     ai_settings_file: Path = UserConfigurable(
-        default=AI_SETTINGS_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("AI_SETTINGS_FILE")) else None,
+        default=AI_SETTINGS_FILE, from_env="AI_SETTINGS_FILE"
     )
     prompt_settings_file: Path = UserConfigurable(
         default=PROMPT_SETTINGS_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("PROMPT_SETTINGS_FILE")) else None,
+        from_env="PROMPT_SETTINGS_FILE",
     )

     # Model configuration
     fast_llm: str = UserConfigurable(
         default="gpt-3.5-turbo-0125",
-        from_env=lambda: os.getenv("FAST_LLM"),
+        from_env="FAST_LLM",
     )
     smart_llm: str = UserConfigurable(
         default="gpt-4-turbo-preview",
-        from_env=lambda: os.getenv("SMART_LLM"),
-    )
-    temperature: float = UserConfigurable(
-        default=0,
-        from_env=lambda: float(v) if (v := os.getenv("TEMPERATURE")) else None,
+        from_env="SMART_LLM",
     )
+    temperature: float = UserConfigurable(default=0, from_env="TEMPERATURE")
     openai_functions: bool = UserConfigurable(
         default=False, from_env=lambda: os.getenv("OPENAI_FUNCTIONS", "False") == "True"
     )
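
The diff does not include UserConfigurable or the settings loader itself, so the following is only a rough, hypothetical sketch of the pattern the plain env-var-name form suggests: look the named variable up and hand the raw string to Pydantic, which casts it during validation. The names and behaviour below are assumptions for illustration, not the forge implementation.

# Hypothetical loader sketch; load_from_env and env_names are made up here,
# not part of AutoGPT/forge.
import os

from pydantic import BaseModel


def load_from_env(model_cls: type[BaseModel], env_names: dict[str, str]) -> BaseModel:
    """Read each field's environment variable and let Pydantic cast the raw string."""
    raw_values = {
        field_name: value
        for field_name, env_var in env_names.items()
        if (value := os.getenv(env_var)) is not None
    }
    # Unset variables are simply omitted, so those fields keep their defaults.
    return model_cls(**raw_values)
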
@@ -115,10 +108,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     memory_backend: str = UserConfigurable("json_file", from_env="MEMORY_BACKEND")
     memory_index: str = UserConfigurable("auto-gpt-memory", from_env="MEMORY_INDEX")
     redis_host: str = UserConfigurable("localhost", from_env="REDIS_HOST")
-    redis_port: int = UserConfigurable(
-        default=6379,
-        from_env=lambda: int(v) if (v := os.getenv("REDIS_PORT")) else None,
-    )
+    redis_port: int = UserConfigurable(default=6379, from_env="REDIS_PORT")
     redis_password: str = UserConfigurable("", from_env="REDIS_PASSWORD")
     wipe_redis_on_start: bool = UserConfigurable(
         default=True,
@@ -170,10 +160,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     sd_webui_url: Optional[str] = UserConfigurable(
         default="http://localhost:7860", from_env="SD_WEBUI_URL"
     )
-    image_size: int = UserConfigurable(
-        default=256,
-        from_env=lambda: int(v) if (v := os.getenv("IMAGE_SIZE")) else None,
-    )
+    image_size: int = UserConfigurable(default=256, from_env="IMAGE_SIZE")

     # Audio to text
     audio_to_text_provider: str = UserConfigurable(
@@ -198,8 +185,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     ###################
     plugins_dir: str = UserConfigurable("plugins", from_env="PLUGINS_DIR")
     plugins_config_file: Path = UserConfigurable(
-        default=PLUGINS_CONFIG_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("PLUGINS_CONFIG_FILE")) else None,
+        default=PLUGINS_CONFIG_FILE, from_env="PLUGINS_CONFIG_FILE"
     )
     plugins_config: PluginsConfig = Field(
         default_factory=lambda: PluginsConfig(plugins={})
@@ -223,8 +209,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     # OpenAI
     openai_credentials: Optional[OpenAICredentials] = None
     azure_config_file: Optional[Path] = UserConfigurable(
-        default=AZURE_CONFIG_FILE,
-        from_env=lambda: Path(f) if (f := os.getenv("AZURE_CONFIG_FILE")) else None,
+        default=AZURE_CONFIG_FILE, from_env="AZURE_CONFIG_FILE"
     )

     # Github
@@ -234,7 +219,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
     # Google
     google_api_key: Optional[str] = UserConfigurable(from_env="GOOGLE_API_KEY")
     google_custom_search_engine_id: Optional[str] = UserConfigurable(
-        from_env=lambda: os.getenv("GOOGLE_CUSTOM_SEARCH_ENGINE_ID"),
+        from_env="GOOGLE_CUSTOM_SEARCH_ENGINE_ID",
     )

     # Huggingface
@@ -29,9 +29,7 @@ logger = logging.getLogger(__name__)


 class S3FileStorageConfiguration(FileStorageConfiguration):
     bucket: str = UserConfigurable("autogpt", from_env="STORAGE_BUCKET")
-    s3_endpoint_url: Optional[SecretStr] = UserConfigurable(
-        from_env=lambda: SecretStr(v) if (v := os.getenv("S3_ENDPOINT_URL")) else None
-    )
+    s3_endpoint_url: Optional[SecretStr] = UserConfigurable(from_env="S3_ENDPOINT_URL")


 class S3FileStorage(FileStorage):
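
Pydantic applies the same casting to SecretStr, which is why the manual SecretStr(...) wrapper in the S3 configuration can go. A quick check with plain Pydantic (not AutoGPT code; the endpoint value is made up):

from typing import Optional

from pydantic import BaseModel, SecretStr


class Endpoint(BaseModel):
    s3_endpoint_url: Optional[SecretStr] = None


cfg = Endpoint(s3_endpoint_url="http://minio.local:9000")   # raw string in
assert isinstance(cfg.s3_endpoint_url, SecretStr)           # SecretStr out
assert cfg.s3_endpoint_url.get_secret_value() == "http://minio.local:9000"
print(cfg.s3_endpoint_url)  # prints '**********'; the value stays masked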
@@ -57,8 +57,7 @@ class LoggingConfig(SystemConfiguration):

     # Console output
     log_format: LogFormatName = UserConfigurable(
-        default=LogFormatName.SIMPLE,
-        from_env=lambda: LogFormatName(os.getenv("LOG_FORMAT", "simple")),
+        default=LogFormatName.SIMPLE, from_env="LOG_FORMAT"
     )
     plain_console_output: bool = UserConfigurable(
         default=False,
@@ -69,8 +68,8 @@ class LoggingConfig(SystemConfiguration):
     log_dir: Path = LOG_DIR
     log_file_format: Optional[LogFormatName] = UserConfigurable(
         default=LogFormatName.SIMPLE,
-        from_env=lambda: LogFormatName(
-            os.getenv("LOG_FILE_FORMAT", os.getenv("LOG_FORMAT", "simple"))
+        from_env=lambda: os.getenv(
+            "LOG_FILE_FORMAT", os.getenv("LOG_FORMAT", "simple")
         ),
     )

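
The one lambda that stays in LoggingConfig presumably remains because the LOG_FILE_FORMAT -> LOG_FORMAT -> "simple" fallback chain cannot be expressed as a single variable name; after this change it only returns a plain string, and turning that string into a LogFormatName member is again left to Pydantic. A self-contained illustration with an assumed stand-in enum (the real LogFormatName's values are not shown in this diff):

import os
from enum import Enum

from pydantic import BaseModel


class LogFormatName(str, Enum):  # stand-in with assumed values
    SIMPLE = "simple"
    DEBUG = "debug"


class FileLoggingDemo(BaseModel):
    log_file_format: LogFormatName = LogFormatName.SIMPLE


os.environ.pop("LOG_FILE_FORMAT", None)   # unset -> fall back to LOG_FORMAT
os.environ["LOG_FORMAT"] = "debug"

raw = os.getenv("LOG_FILE_FORMAT", os.getenv("LOG_FORMAT", "simple"))
assert FileLoggingDemo(log_file_format=raw).log_file_format is LogFormatName.DEBUG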