diff --git a/autogpt/command_decorator.py b/autogpt/command_decorator.py
index d082d9bf..9a6f58ae 100644
--- a/autogpt/command_decorator.py
+++ b/autogpt/command_decorator.py
@@ -1,7 +1,11 @@
-import functools
-from typing import Any, Callable, Optional, TypedDict
+from __future__ import annotations
+
+import functools
+from typing import TYPE_CHECKING, Any, Callable, Optional, TypedDict
+
+if TYPE_CHECKING:
+    from autogpt.config import Config
 
-from autogpt.config import Config
 from autogpt.models.command import Command, CommandParameter
 
 # Unique identifier for auto-gpt commands
diff --git a/autogpt/config/config.py b/autogpt/config/config.py
index c0d30910..93fc42e9 100644
--- a/autogpt/config/config.py
+++ b/autogpt/config/config.py
@@ -13,6 +13,7 @@ from colorama import Fore
 from pydantic import Field, validator
 
 from autogpt.core.configuration.schema import Configurable, SystemSettings
+from autogpt.llm.providers.openai import OPEN_AI_CHAT_MODELS
 from autogpt.plugins.plugins_config import PluginsConfig
 
 AI_SETTINGS_FILE = "ai_settings.yaml"
@@ -147,6 +148,15 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
             ), f"Plugins must subclass AutoGPTPluginTemplate; {p} is a template instance"
         return p
 
+    @validator("openai_functions")
+    def validate_openai_functions(cls, v: bool, values: dict[str, Any]):
+        if v:
+            smart_llm = values["smart_llm"]
+            assert OPEN_AI_CHAT_MODELS[smart_llm].supports_functions, (
+                f"Model {smart_llm} does not support OpenAI Functions. "
+                "Please disable OPENAI_FUNCTIONS or choose a suitable model."
+            )
+
     def get_openai_credentials(self, model: str) -> dict[str, str]:
         credentials = {
             "api_key": self.openai_api_key,
diff --git a/autogpt/llm/base.py b/autogpt/llm/base.py
index 14a146b3..1ac00112 100644
--- a/autogpt/llm/base.py
+++ b/autogpt/llm/base.py
@@ -67,6 +67,8 @@ class CompletionModelInfo(ModelInfo):
 class ChatModelInfo(CompletionModelInfo):
     """Struct for chat model information."""
 
+    supports_functions: bool = False
+
 
 @dataclass
 class TextModelInfo(CompletionModelInfo):
diff --git a/autogpt/llm/providers/openai.py b/autogpt/llm/providers/openai.py
index 6e746142..35c652f0 100644
--- a/autogpt/llm/providers/openai.py
+++ b/autogpt/llm/providers/openai.py
@@ -36,12 +36,14 @@ OPEN_AI_CHAT_MODELS = {
             prompt_token_cost=0.0015,
             completion_token_cost=0.002,
             max_tokens=4096,
+            supports_functions=True,
         ),
         ChatModelInfo(
             name="gpt-3.5-turbo-16k-0613",
             prompt_token_cost=0.003,
             completion_token_cost=0.004,
             max_tokens=16384,
+            supports_functions=True,
         ),
         ChatModelInfo(
             name="gpt-4-0314",
@@ -54,6 +56,7 @@ OPEN_AI_CHAT_MODELS = {
             prompt_token_cost=0.03,
             completion_token_cost=0.06,
             max_tokens=8191,
+            supports_functions=True,
         ),
         ChatModelInfo(
             name="gpt-4-32k-0314",
@@ -66,6 +69,7 @@ OPEN_AI_CHAT_MODELS = {
             prompt_token_cost=0.06,
             completion_token_cost=0.12,
             max_tokens=32768,
+            supports_functions=True,
         ),
     ]
 }
diff --git a/autogpt/models/command.py b/autogpt/models/command.py
index 61469786..a7cec509 100644
--- a/autogpt/models/command.py
+++ b/autogpt/models/command.py
@@ -1,6 +1,9 @@
-from typing import Any, Callable, Optional
+from __future__ import annotations
 
-from autogpt.config import Config
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+if TYPE_CHECKING:
+    from autogpt.config import Config
 
 from .command_parameter import CommandParameter