Mirror of https://github.com/aljazceru/Auto-GPT.git (synced 2025-12-24 01:14:22 +01:00)

Commit: apply black
@@ -82,6 +82,7 @@ def main(
     from autogpt.plugins import scan_plugins
     from autogpt.prompts.prompt import construct_main_ai_config
     from autogpt.utils import get_latest_bulletin
+
     if ctx.invoked_subcommand is None:
         cfg = Config()
         # TODO: fill in llm values here

@@ -61,7 +61,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         return False

     def on_planning(
         self, prompt: PromptGenerator, messages: List[Message]
     ) -> Optional[str]:
         """This method is called before the planning chat completion is done.
         Args:

@@ -142,7 +142,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         return False

     def pre_command(
         self, command_name: str, arguments: Dict[str, Any]
     ) -> Tuple[str, Dict[str, Any]]:
         """This method is called before the command is executed.
         Args:

@@ -171,7 +171,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         pass

     def can_handle_chat_completion(
         self, messages: Dict[Any, Any], model: str, temperature: float, max_tokens: int
     ) -> bool:
         """This method is called to check that the plugin can
         handle the chat_completion method.

@@ -185,7 +185,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         return False

     def handle_chat_completion(
         self, messages: List[Message], model: str, temperature: float, max_tokens: int
     ) -> str:
         """This method is called when the chat completion is done.
         Args:

@@ -47,7 +47,7 @@ def write_dict_to_json_file(data: dict, file_path: str) -> None:
         data (dict): Dictionary to write.
         file_path (str): Path to the file.
     """
-    with open(file_path, 'w') as file:
+    with open(file_path, "w") as file:
         json.dump(data, file, indent=4)


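The quote change above is black's string normalization; the multi-line call rewrites in the hunks below are its 88-character line wrapping. A minimal sketch of reproducing the same formatting programmatically, assuming a recent black release that exposes the format_str/Mode API:

import black

# Pre-commit style: a single-quoted literal, as in the hunk above.
source = "with open(file_path, 'w') as file:\n    json.dump(data, file, indent=4)\n"

# Mode() defaults to an 88-character line length and double-quote normalization.
formatted = black.format_str(source, mode=black.Mode())
print(formatted)  # the 'w' literal comes back as "w"
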
@@ -64,35 +64,42 @@ def fetch_openai_plugins_manifest_and_spec(cfg: Config) -> dict:
     for url in cfg.plugins_openai:
         openai_plugin_client_dir = f"{cfg.plugins_dir}/openai/{urlparse(url).netloc}"
         create_directory_if_not_exists(openai_plugin_client_dir)
-        if not os.path.exists(f'{openai_plugin_client_dir}/ai-plugin.json'):
+        if not os.path.exists(f"{openai_plugin_client_dir}/ai-plugin.json"):
             try:
                 response = requests.get(f"{url}/.well-known/ai-plugin.json")
                 if response.status_code == 200:
                     manifest = response.json()
                     if manifest["schema_version"] != "v1":
-                        print(f"Unsupported manifest version: {manifest['schem_version']} for {url}")
+                        print(
+                            f"Unsupported manifest version: {manifest['schem_version']} for {url}"
+                        )
                         continue
                     if manifest["api"]["type"] != "openapi":
-                        print(f"Unsupported API type: {manifest['api']['type']} for {url}")
+                        print(
+                            f"Unsupported API type: {manifest['api']['type']} for {url}"
+                        )
                         continue
-                    write_dict_to_json_file(manifest, f'{openai_plugin_client_dir}/ai-plugin.json')
+                    write_dict_to_json_file(
+                        manifest, f"{openai_plugin_client_dir}/ai-plugin.json"
+                    )
                 else:
                     print(f"Failed to fetch manifest for {url}: {response.status_code}")
             except requests.exceptions.RequestException as e:
                 print(f"Error while requesting manifest from {url}: {e}")
         else:
             print(f"Manifest for {url} already exists")
-            manifest = json.load(open(f'{openai_plugin_client_dir}/ai-plugin.json'))
-        if not os.path.exists(f'{openai_plugin_client_dir}/openapi.json'):
-            openapi_spec = openapi_python_client._get_document(url=manifest["api"]["url"], path=None, timeout=5)
-            write_dict_to_json_file(openapi_spec, f'{openai_plugin_client_dir}/openapi.json')
+            manifest = json.load(open(f"{openai_plugin_client_dir}/ai-plugin.json"))
+        if not os.path.exists(f"{openai_plugin_client_dir}/openapi.json"):
+            openapi_spec = openapi_python_client._get_document(
+                url=manifest["api"]["url"], path=None, timeout=5
+            )
+            write_dict_to_json_file(
+                openapi_spec, f"{openai_plugin_client_dir}/openapi.json"
+            )
         else:
             print(f"OpenAPI spec for {url} already exists")
-            openapi_spec = json.load(open(f'{openai_plugin_client_dir}/openapi.json'))
-        manifests[url] = {
-            'manifest': manifest,
-            'openapi_spec': openapi_spec
-        }
+            openapi_spec = json.load(open(f"{openai_plugin_client_dir}/openapi.json"))
+        manifests[url] = {"manifest": manifest, "openapi_spec": openapi_spec}
     return manifests


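For context, the control flow in this hunk is: fetch /.well-known/ai-plugin.json, reject unsupported schema_version and API types, and cache the manifest on disk. A minimal standalone sketch of that pattern, assuming only the requests package (the helper name and cache path are illustrative, not taken from the repository):

import json
import os
from typing import Optional

import requests


def fetch_plugin_manifest(url: str, cache_path: str) -> Optional[dict]:
    """Fetch and cache an OpenAI plugin manifest, mirroring the checks above."""
    if os.path.exists(cache_path):
        with open(cache_path) as f:
            return json.load(f)
    try:
        response = requests.get(f"{url}/.well-known/ai-plugin.json", timeout=10)
    except requests.exceptions.RequestException as e:
        print(f"Error while requesting manifest from {url}: {e}")
        return None
    if response.status_code != 200:
        print(f"Failed to fetch manifest for {url}: {response.status_code}")
        return None
    manifest = response.json()
    if manifest.get("schema_version") != "v1":
        print(f"Unsupported manifest version for {url}")
        return None
    if manifest.get("api", {}).get("type") != "openapi":
        print(f"Unsupported API type for {url}")
        return None
    with open(cache_path, "w") as f:
        json.dump(manifest, f, indent=4)
    return manifest
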
@@ -117,7 +124,9 @@ def create_directory_if_not_exists(directory_path: str) -> bool:
         return True


-def initialize_openai_plugins(manifests_specs: dict, cfg: Config, debug: bool = False) -> dict:
+def initialize_openai_plugins(
+    manifests_specs: dict, cfg: Config, debug: bool = False
+) -> dict:
     """
     Initialize OpenAI plugins.
     Args:

@@ -127,39 +136,47 @@ def initialize_openai_plugins(manifests_specs: dict, cfg: Config, debug: bool =
     Returns:
         dict: per url dictionary of manifest, spec and client.
     """
-    openai_plugins_dir = f'{cfg.plugins_dir}/openai'
+    openai_plugins_dir = f"{cfg.plugins_dir}/openai"
     if create_directory_if_not_exists(openai_plugins_dir):
         for url, manifest_spec in manifests_specs.items():
-            openai_plugin_client_dir = f'{openai_plugins_dir}/{urlparse(url).hostname}'
-            _meta_option = openapi_python_client.MetaType.SETUP,
-            _config = OpenAPIConfig(**{
-                'project_name_override': 'client',
-                'package_name_override': 'client',
-            })
+            openai_plugin_client_dir = f"{openai_plugins_dir}/{urlparse(url).hostname}"
+            _meta_option = (openapi_python_client.MetaType.SETUP,)
+            _config = OpenAPIConfig(
+                **{
+                    "project_name_override": "client",
+                    "package_name_override": "client",
+                }
+            )
             prev_cwd = Path.cwd()
             os.chdir(openai_plugin_client_dir)
-            Path('ai-plugin.json')
-            if not os.path.exists('client'):
+            Path("ai-plugin.json")
+            if not os.path.exists("client"):
                 client_results = openapi_python_client.create_new_client(
-                    url=manifest_spec['manifest']['api']['url'],
+                    url=manifest_spec["manifest"]["api"]["url"],
                     path=None,
                     meta=_meta_option,
                     config=_config,
                 )
                 if client_results:
-                    print(f"Error creating OpenAPI client: {client_results[0].header} \n"
-                          f" details: {client_results[0].detail}")
+                    print(
+                        f"Error creating OpenAPI client: {client_results[0].header} \n"
+                        f" details: {client_results[0].detail}"
+                    )
                     continue
-            spec = importlib.util.spec_from_file_location('client', 'client/client/client.py')
+            spec = importlib.util.spec_from_file_location(
+                "client", "client/client/client.py"
+            )
             module = importlib.util.module_from_spec(spec)
             spec.loader.exec_module(module)
             client = module.Client(base_url=url)
             os.chdir(prev_cwd)
-            manifest_spec['client'] = client
+            manifest_spec["client"] = client
     return manifests_specs


-def instantiate_openai_plugin_clients(manifests_specs_clients: dict, cfg: Config, debug: bool = False) -> dict:
+def instantiate_openai_plugin_clients(
+    manifests_specs_clients: dict, cfg: Config, debug: bool = False
+) -> dict:
     """
     Instantiates BaseOpenAIPlugin instances for each OpenAI plugin.
     Args:

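The reflowed spec_from_file_location call above is the standard importlib recipe for loading the generated client package from a file path. A minimal sketch of that recipe in isolation (the load_module_from_path helper is illustrative, not part of the repository):

import importlib.util
from types import ModuleType


def load_module_from_path(name: str, path: str) -> ModuleType:
    """Load a module object from an arbitrary file path."""
    spec = importlib.util.spec_from_file_location(name, path)
    if spec is None or spec.loader is None:
        raise ImportError(f"cannot build an import spec for {path}")
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)  # runs the module's top-level code
    return module


# Hypothetical usage against the layout produced by openapi-python-client:
# client_module = load_module_from_path("client", "client/client/client.py")
# client = client_module.Client(base_url="https://example.invalid/")
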
@@ -203,16 +220,18 @@ def scan_plugins(cfg: Config, debug: bool = False) -> List[AutoGPTPluginTemplate
                 a_module = getattr(zipped_module, key)
                 a_keys = dir(a_module)
                 if (
                     "_abc_impl" in a_keys
                     and a_module.__name__ != "AutoGPTPluginTemplate"
                     and blacklist_whitelist_check(a_module.__name__, cfg)
                 ):
                     loaded_plugins.append(a_module())
     # OpenAI plugins
     if cfg.plugins_openai:
         manifests_specs = fetch_openai_plugins_manifest_and_spec(cfg)
         if manifests_specs.keys():
-            manifests_specs_clients = initialize_openai_plugins(manifests_specs, cfg, debug)
+            manifests_specs_clients = initialize_openai_plugins(
+                manifests_specs, cfg, debug
+            )
             for url, openai_plugin_meta in manifests_specs_clients.items():
                 if blacklist_whitelist_check(url, cfg):
                     plugin = BaseOpenAIPlugin(openai_plugin_meta)

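The zipped-plugin branch of scan_plugins shown above loads candidate modules from plugin zip archives and keeps only classes that implement the plugin template. A rough sketch of the underlying zip-scanning step, using only the standard library (this is the general technique, not the repository's inspect_zip_for_module implementation):

import zipfile
from typing import Optional


def find_plugin_init(zip_path: str) -> Optional[str]:
    """Return the archive path of the first plugin __init__.py found, if any."""
    with zipfile.ZipFile(zip_path) as archive:
        for name in archive.namelist():
            if name.endswith("__init__.py") and "__MACOSX" not in name:
                return name
    return None
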
@@ -19,10 +19,10 @@ def dummy_plugin():
         "manifest": {
             "name_for_model": "Dummy",
             "schema_version": "1.0",
-            "description_for_model": "A dummy plugin for testing purposes"
+            "description_for_model": "A dummy plugin for testing purposes",
         },
         "client": None,
-        "openapi_spec": None
+        "openapi_spec": None,
     }
     return DummyPlugin(manifests_specs_clients)

@@ -10,11 +10,11 @@ from autogpt.plugins import (
 PLUGINS_TEST_DIR = "tests/unit/data/test_plugins"
 PLUGIN_TEST_ZIP_FILE = "Auto-GPT-Plugin-Test-master.zip"
 PLUGIN_TEST_INIT_PY = "Auto-GPT-Plugin-Test-master/src/auto_gpt_vicuna/__init__.py"
-PLUGIN_TEST_OPENAI = 'https://weathergpt.vercel.app/'
+PLUGIN_TEST_OPENAI = "https://weathergpt.vercel.app/"


 def test_inspect_zip_for_module():
-    result = inspect_zip_for_module(str(f'{PLUGINS_TEST_DIR}/{PLUGIN_TEST_ZIP_FILE}'))
+    result = inspect_zip_for_module(str(f"{PLUGINS_TEST_DIR}/{PLUGIN_TEST_ZIP_FILE}"))
     assert result == PLUGIN_TEST_INIT_PY


@@ -27,37 +27,56 @@ def mock_config_blacklist_whitelist_check():
     return MockConfig()


-def test_blacklist_whitelist_check_blacklist(mock_config_blacklist_whitelist_check,
-                                             monkeypatch):
+def test_blacklist_whitelist_check_blacklist(
+    mock_config_blacklist_whitelist_check, monkeypatch
+):
     monkeypatch.setattr("builtins.input", lambda _: "y")
-    assert not blacklist_whitelist_check("BadPlugin", mock_config_blacklist_whitelist_check)
+    assert not blacklist_whitelist_check(
+        "BadPlugin", mock_config_blacklist_whitelist_check
+    )


-def test_blacklist_whitelist_check_whitelist(mock_config_blacklist_whitelist_check, monkeypatch):
+def test_blacklist_whitelist_check_whitelist(
+    mock_config_blacklist_whitelist_check, monkeypatch
+):
     monkeypatch.setattr("builtins.input", lambda _: "y")
-    assert blacklist_whitelist_check("GoodPlugin", mock_config_blacklist_whitelist_check)
+    assert blacklist_whitelist_check(
+        "GoodPlugin", mock_config_blacklist_whitelist_check
+    )


-def test_blacklist_whitelist_check_user_input_yes(mock_config_blacklist_whitelist_check, monkeypatch):
+def test_blacklist_whitelist_check_user_input_yes(
+    mock_config_blacklist_whitelist_check, monkeypatch
+):
     monkeypatch.setattr("builtins.input", lambda _: "y")
-    assert blacklist_whitelist_check("UnknownPlugin", mock_config_blacklist_whitelist_check)
+    assert blacklist_whitelist_check(
+        "UnknownPlugin", mock_config_blacklist_whitelist_check
+    )


-def test_blacklist_whitelist_check_user_input_no(mock_config_blacklist_whitelist_check, monkeypatch):
+def test_blacklist_whitelist_check_user_input_no(
+    mock_config_blacklist_whitelist_check, monkeypatch
+):
     monkeypatch.setattr("builtins.input", lambda _: "n")
-    assert not blacklist_whitelist_check("UnknownPlugin", mock_config_blacklist_whitelist_check)
+    assert not blacklist_whitelist_check(
+        "UnknownPlugin", mock_config_blacklist_whitelist_check
+    )


-def test_blacklist_whitelist_check_user_input_invalid(mock_config_blacklist_whitelist_check, monkeypatch):
+def test_blacklist_whitelist_check_user_input_invalid(
+    mock_config_blacklist_whitelist_check, monkeypatch
+):
     monkeypatch.setattr("builtins.input", lambda _: "invalid")
-    assert not blacklist_whitelist_check("UnknownPlugin", mock_config_blacklist_whitelist_check)
+    assert not blacklist_whitelist_check(
+        "UnknownPlugin", mock_config_blacklist_whitelist_check
+    )


 @pytest.fixture
 def config_with_plugins():
     cfg = Config()
     cfg.plugins_dir = PLUGINS_TEST_DIR
-    cfg.plugins_openai = ['https://weathergpt.vercel.app/']
+    cfg.plugins_openai = ["https://weathergpt.vercel.app/"]
     return cfg

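All of these tests follow one pattern: monkeypatch builtins.input so the interactive allow/deny prompt can be exercised without blocking on stdin. A self-contained sketch of that technique, runnable with pytest (ask_permission is a hypothetical stand-in for the prompt inside blacklist_whitelist_check, not code from the repository):

def ask_permission(plugin_name: str) -> bool:
    """Hypothetical stand-in for an interactive allow/deny prompt."""
    answer = input(f"Allow plugin {plugin_name}? (y/n) ")
    return answer.strip().lower() == "y"


def test_permission_granted_on_yes(monkeypatch):
    # Replace the builtin prompt so the test never waits for real input.
    monkeypatch.setattr("builtins.input", lambda _: "y")
    assert ask_permission("UnknownPlugin")


def test_permission_denied_on_anything_else(monkeypatch):
    monkeypatch.setattr("builtins.input", lambda _: "invalid")
    assert not ask_permission("UnknownPlugin")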