Mirror of https://github.com/aljazceru/Auto-GPT.git (synced 2026-01-14 11:34:20 +01:00)
Verify model compatibility if OPENAI_FUNCTIONS is set (#5075)
Co-authored-by: Luke <2609441+lc0rp@users.noreply.github.com>
committed by GitHub
parent b7f1df3e1d
commit 3651d22147
@@ -1,7 +1,11 @@
-import functools
-from typing import Any, Callable, Optional, TypedDict
+from __future__ import annotations
+
+import functools
+from typing import TYPE_CHECKING, Any, Callable, Optional, TypedDict
+
+if TYPE_CHECKING:
+    from autogpt.config import Config

-from autogpt.config import Config
 from autogpt.models.command import Command, CommandParameter

 # Unique identifier for auto-gpt commands
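The hunk above moves the Config import behind a TYPE_CHECKING guard. A minimal sketch of that pattern, using hypothetical module names rather than Auto-GPT's, showing why the runtime import (and any circular-import risk) can be dropped once `from __future__ import annotations` makes annotations lazy:

# Hypothetical module illustrating the TYPE_CHECKING import pattern used above.
from __future__ import annotations  # annotations become strings, evaluated lazily

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only static type checkers (mypy, pyright) "run" this block; the import
    # never happens at runtime, so circular imports are avoided.
    from my_app.config import Config  # hypothetical import path


def describe(config: Config) -> str:
    # At runtime, 'Config' is just the string "Config" in __annotations__.
    return f"running with {config!r}"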
@@ -13,6 +13,7 @@ from colorama import Fore
 from pydantic import Field, validator

 from autogpt.core.configuration.schema import Configurable, SystemSettings
+from autogpt.llm.providers.openai import OPEN_AI_CHAT_MODELS
 from autogpt.plugins.plugins_config import PluginsConfig

 AI_SETTINGS_FILE = "ai_settings.yaml"
@@ -147,6 +148,15 @@ class Config(SystemSettings, arbitrary_types_allowed=True):
         ), f"Plugins must subclass AutoGPTPluginTemplate; {p} is a template instance"
         return p

+    @validator("openai_functions")
+    def validate_openai_functions(cls, v: bool, values: dict[str, Any]):
+        if v:
+            smart_llm = values["smart_llm"]
+            assert OPEN_AI_CHAT_MODELS[smart_llm].supports_functions, (
+                f"Model {smart_llm} does not support OpenAI Functions. "
+                "Please disable OPENAI_FUNCTIONS or choose a suitable model."
+            )
+
     def get_openai_credentials(self, model: str) -> dict[str, str]:
         credentials = {
             "api_key": self.openai_api_key,
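A self-contained sketch of how a pydantic (v1-style) validator can enforce this kind of cross-field check; KNOWN_MODELS and DemoConfig are illustrative stand-ins, not Auto-GPT's OPEN_AI_CHAT_MODELS or Config. Note that `values` only contains fields declared before `openai_functions`, so `smart_llm` must come first:

from typing import Any

from pydantic import BaseModel, ValidationError, validator

# Hypothetical capability table standing in for OPEN_AI_CHAT_MODELS
KNOWN_MODELS = {
    "gpt-3.5-turbo-0613": True,   # supports function calling
    "gpt-3.5-turbo-0301": False,  # does not
}


class DemoConfig(BaseModel):
    smart_llm: str = "gpt-3.5-turbo-0613"
    openai_functions: bool = False

    @validator("openai_functions")
    def validate_openai_functions(cls, v: bool, values: dict[str, Any]):
        # 'values' holds previously validated fields, so smart_llm is visible here.
        if v and not KNOWN_MODELS.get(values["smart_llm"], False):
            raise ValueError(
                f"Model {values['smart_llm']} does not support OpenAI Functions."
            )
        return v


DemoConfig(smart_llm="gpt-3.5-turbo-0613", openai_functions=True)  # passes
try:
    DemoConfig(smart_llm="gpt-3.5-turbo-0301", openai_functions=True)
except ValidationError as e:
    print(e)  # reports the unsupported-model message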
@@ -67,6 +67,8 @@ class CompletionModelInfo(ModelInfo):
 class ChatModelInfo(CompletionModelInfo):
     """Struct for chat model information."""

+    supports_functions: bool = False
+

 @dataclass
 class TextModelInfo(CompletionModelInfo):
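The field added above defaults to False, so only model entries that explicitly opt in are treated as function-capable; everything else stays conservative. A reduced stand-in (not the real ChatModelInfo) of that behaviour:

from dataclasses import dataclass


@dataclass
class DemoChatModelInfo:
    # Stand-in for ChatModelInfo: the capability flag is opt-in.
    name: str
    max_tokens: int
    supports_functions: bool = False


legacy = DemoChatModelInfo(name="gpt-4-0314", max_tokens=8191)
june = DemoChatModelInfo(name="gpt-4-0613", max_tokens=8191, supports_functions=True)
assert not legacy.supports_functions
assert june.supports_functions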
@@ -36,12 +36,14 @@ OPEN_AI_CHAT_MODELS = {
             prompt_token_cost=0.0015,
             completion_token_cost=0.002,
             max_tokens=4096,
+            supports_functions=True,
         ),
         ChatModelInfo(
             name="gpt-3.5-turbo-16k-0613",
             prompt_token_cost=0.003,
             completion_token_cost=0.004,
             max_tokens=16384,
+            supports_functions=True,
         ),
         ChatModelInfo(
             name="gpt-4-0314",
@@ -54,6 +56,7 @@ OPEN_AI_CHAT_MODELS = {
             prompt_token_cost=0.03,
             completion_token_cost=0.06,
             max_tokens=8191,
+            supports_functions=True,
         ),
         ChatModelInfo(
             name="gpt-4-32k-0314",
@@ -66,6 +69,7 @@ OPEN_AI_CHAT_MODELS = {
             prompt_token_cost=0.06,
             completion_token_cost=0.12,
             max_tokens=32768,
+            supports_functions=True,
         ),
     ]
 }
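With the flags in place, a caller can gate function calling on the capability table. A sketch of that lookup, using a local table shaped like OPEN_AI_CHAT_MODELS (entries keyed by model name) rather than importing the real one:

from dataclasses import dataclass


@dataclass
class ModelCaps:
    # Minimal name-keyed entry, mirroring the shape of the real table.
    name: str
    supports_functions: bool = False


CHAT_MODELS = {
    m.name: m
    for m in [
        ModelCaps("gpt-3.5-turbo-0613", supports_functions=True),
        ModelCaps("gpt-4-0314"),
    ]
}


def can_use_openai_functions(model: str) -> bool:
    # Unknown models fall back to a conservative False.
    info = CHAT_MODELS.get(model)
    return bool(info and info.supports_functions)


assert can_use_openai_functions("gpt-3.5-turbo-0613")
assert not can_use_openai_functions("gpt-4-0314")
assert not can_use_openai_functions("some-unknown-model")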
@@ -1,6 +1,9 @@
-from typing import Any, Callable, Optional
+from __future__ import annotations

-from autogpt.config import Config
+from typing import TYPE_CHECKING, Any, Callable, Optional
+
+if TYPE_CHECKING:
+    from autogpt.config import Config

 from .command_parameter import CommandParameter
