Fix all commands and cleanup
@@ -3,33 +3,9 @@ import json
 from typing import Dict, List, NoReturn, Union

 from autogpt.agent.agent_manager import AgentManager
-from autogpt.commands.analyze_code import analyze_code
-from autogpt.commands.audio_text import read_audio_from_file
 from autogpt.commands.command import CommandRegistry, command
-from autogpt.commands.evaluate_code import evaluate_code
-from autogpt.commands.execute_code import (
-    execute_python_file,
-    execute_shell,
-    execute_shell_popen,
-)
-from autogpt.commands.file_operations import (
-    append_to_file,
-    delete_file,
-    download_file,
-    read_file,
-    search_files,
-    write_to_file,
-)
-from autogpt.commands.git_operations import clone_repository
-from autogpt.commands.google_search import google_official_search, google_search
-from autogpt.commands.image_gen import generate_image
-from autogpt.commands.improve_code import improve_code
-from autogpt.commands.twitter import send_tweet
 from autogpt.commands.web_requests import scrape_links, scrape_text
-from autogpt.commands.web_selenium import browse_website
-from autogpt.commands.write_tests import write_tests
 from autogpt.config import Config
-from autogpt.json_utils.json_fix_llm import fix_and_parse_json
 from autogpt.memory import get_memory
 from autogpt.processing.text import summarize_text
 from autogpt.prompts.generator import PromptGenerator
@@ -137,26 +113,8 @@ def execute_command(
         command_name = map_command_synonyms(command_name.lower())

         if command_name == "memory_add":
-            return memory.add(arguments["string"])
+            return get_memory(CFG).add(arguments["string"])
-        elif command_name == "get_text_summary":
-            return get_text_summary(arguments["url"], arguments["question"])
-        elif command_name == "get_hyperlinks":
-            return get_hyperlinks(arguments["url"])
-        elif command_name == "analyze_code":
-            return analyze_code(arguments["code"])
-        elif command_name == "download_file":
-            if not CFG.allow_downloads:
-                return "Error: You do not have user authorization to download files locally."
-            return download_file(arguments["url"], arguments["file"])
-        elif command_name == "execute_shell_popen":
-            if CFG.execute_local_commands:
-                return execute_shell_popen(arguments["command_line"])
-            else:
-                return (
-                    "You are not allowed to run local shell commands. To execute"
-                    " shell commands, EXECUTE_LOCAL_COMMANDS must be set to 'True' "
-                    "in your config. Do not attempt to bypass the restriction."
-                )
         # TODO: Change these to take in a file rather than pasted code, if
         # non-file is given, return instructions "Input should be a python
         # filepath, write your code to file and try again
@@ -177,6 +135,7 @@ def execute_command(
         return f"Error: {str(e)}"


+@command("get_text_summary", "Get text summary", '"url": "<url>", "question": "<question>"')
 def get_text_summary(url: str, question: str) -> str:
     """Return the results of a Google search

@@ -192,6 +151,7 @@ def get_text_summary(url: str, question: str) -> str:
     return f""" "Result" : {summary}"""


+@command("get_hyperlinks", "Get text summary", '"url": "<url>"')
 def get_hyperlinks(url: str) -> Union[str, List[str]]:
     """Return the results of a Google search

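The hunks above strip the hand-written if/elif dispatch out of execute_command: once a command is declared through the @command decorator, its name can be resolved against the registry at call time instead of being special-cased. A minimal, self-contained sketch of that dispatch pattern follows; the class and method names are illustrative and are not taken from this diff.

from typing import Any, Callable, Dict

# Hypothetical minimal registry, for illustration only.
class MiniRegistry:
    def __init__(self) -> None:
        self._commands: Dict[str, Callable[..., str]] = {}

    def register(self, name: str, func: Callable[..., str]) -> None:
        self._commands[name] = func

    def dispatch(self, name: str, arguments: Dict[str, Any]) -> str:
        func = self._commands.get(name)
        if func is None:
            return f"Error: unknown command '{name}'"
        # The JSON argument keys must match the wrapped function's parameter names.
        return func(**arguments)

registry = MiniRegistry()
registry.register("read_file", lambda filename: f"read {filename}")
print(registry.dispatch("read_file", {"filename": "notes.txt"}))  # -> read notes.txt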
@@ -130,13 +130,14 @@ def main(
     cfg.set_plugins(scan_plugins(cfg, cfg.debug_mode))
     # Create a CommandRegistry instance and scan default folder
     command_registry = CommandRegistry()
+    command_registry.import_commands("autogpt.commands.analyze_code")
     command_registry.import_commands("autogpt.commands.audio_text")
-    command_registry.import_commands("autogpt.commands.evaluate_code")
     command_registry.import_commands("autogpt.commands.execute_code")
     command_registry.import_commands("autogpt.commands.file_operations")
     command_registry.import_commands("autogpt.commands.git_operations")
     command_registry.import_commands("autogpt.commands.google_search")
     command_registry.import_commands("autogpt.commands.image_gen")
+    command_registry.import_commands("autogpt.commands.improve_code")
     command_registry.import_commands("autogpt.commands.twitter")
     command_registry.import_commands("autogpt.commands.web_selenium")
     command_registry.import_commands("autogpt.commands.write_tests")
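main() now registers whole command modules (analyze_code and improve_code are added, the removed evaluate_code module is dropped) instead of importing individual functions. A rough sketch of how an import_commands-style module scan can work using only the standard library; the marker attribute command_name is an assumption of this sketch, not something shown in the diff.

import importlib
import sys
import types
from typing import Callable, Dict

def import_commands(registry: Dict[str, Callable], module_name: str) -> None:
    """Hypothetical scanner: register every function tagged with a command_name attribute."""
    module = importlib.import_module(module_name)
    for attr_name in dir(module):
        attr = getattr(module, attr_name)
        if callable(attr) and hasattr(attr, "command_name"):
            registry[attr.command_name] = attr

# Build a throwaway module so the example is self-contained.
demo = types.ModuleType("demo_commands")

def write_tests(code: str) -> str:
    return f"tests for {len(code)} chars of code"

write_tests.command_name = "write_tests"  # what a @command decorator would record
demo.write_tests = write_tests
sys.modules["demo_commands"] = demo

registry: Dict[str, Callable] = {}
import_commands(registry, "demo_commands")
print(sorted(registry))  # -> ['write_tests']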
@@ -1,9 +1,15 @@
 """Code evaluation module."""
 from __future__ import annotations

+from autogpt.commands.command import command
 from autogpt.llm_utils import call_ai_function


+@command(
+    "analyze_code",
+    "Analyze Code",
+    '"code": "<full_code_string>"',
+)
 def analyze_code(code: str) -> list[str]:
     """
     A function that takes in a string and returns a response from create chat
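analyze_code now carries its registration data (command name, display description, expected JSON argument signature) right next to the function via @command. A rough sketch of how such a decorator can attach that metadata without changing the function it wraps; this is an assumption about the mechanism, not the decorator's actual implementation.

from __future__ import annotations

from typing import Any, Callable

def command(name: str, description: str, signature: str = "") -> Callable[..., Any]:
    """Hypothetical decorator: tag a function with command metadata."""

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        # Metadata lives on the function object; a registry can later scan a
        # module for tagged callables and register them by name.
        func.command_name = name
        func.command_description = description
        func.command_signature = signature
        return func

    return decorator

@command("analyze_code", "Analyze Code", '"code": "<full_code_string>"')
def analyze_code(code: str) -> list[str]:
    return [f"{len(code)} characters analyzed"]

print(analyze_code.command_name)  # -> analyze_code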
@@ -13,11 +13,11 @@ CFG = Config()
 @command(
     "read_audio_from_file",
     "Convert Audio to text",
-    '"file": "<file>"',
+    '"filename": "<filename>"',
     CFG.huggingface_audio_to_text_model,
     "Configure huggingface_audio_to_text_model.",
 )
-def read_audio_from_file(audio_path: str) -> str:
+def read_audio_from_file(filename: str) -> str:
     """
     Convert audio to text.

@@ -27,7 +27,7 @@ def read_audio_from_file(audio_path: str) -> str:
     Returns:
         str: The text from the audio
     """
-    audio_path = path_in_workspace(audio_path)
+    audio_path = path_in_workspace(filename)
     with open(audio_path, "rb") as audio_file:
         audio = audio_file.read()
     return read_audio(audio)
@@ -20,7 +20,7 @@ class Command:
         name: str,
         description: str,
         method: Callable[..., Any],
-        signature: str = None,
+        signature: str = '',
         enabled: bool = True,
         disabled_reason: Optional[str] = None,
     ):
@@ -126,7 +126,7 @@ class CommandRegistry:
 def command(
     name: str,
     description: str,
-    signature: str = None,
+    signature: str = '',
     enabled: bool = True,
     disabled_reason: Optional[str] = None,
 ) -> Callable[..., Any]:
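Both signature defaults change from None to ''. The difference matters wherever the signature string is interpolated into generated prompt text: an empty string formats to nothing, while None formats to the literal word "None". A small illustration; the prompt line format here is assumed for demonstration only.

def prompt_line(name: str, description: str, signature: str = "") -> str:
    # Hypothetical prompt formatting, shown only to contrast the two defaults.
    return f'{name}: "{description}", args: {signature}'

print(prompt_line("list_agents", "List GPT Agents", ""))    # -> list_agents: "List GPT Agents", args:
print(prompt_line("list_agents", "List GPT Agents", None))  # -> list_agents: "List GPT Agents", args: None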
@@ -12,17 +12,17 @@ from autogpt.workspace import WORKSPACE_PATH, path_in_workspace
 CFG = Config()


-@command("execute_python_file", "Execute Python File", '"file": "<file>"')
-def execute_python_file(file: str) -> str:
+@command("execute_python_file", "Execute Python File", '"filename": "<filename>"')
+def execute_python_file(filename: str) -> str:
     """Execute a Python file in a Docker container and return the output

     Args:
-        file (str): The name of the file to execute
+        filename (str): The name of the file to execute

     Returns:
         str: The output of the file
     """
+    file = filename
     print(f"Executing file '{file}' in workspace '{WORKSPACE_PATH}'")

     if not file.endswith(".py"):
@@ -138,9 +138,16 @@ def execute_shell(command_line: str) -> str:

     os.chdir(current_dir)

-    return output
-

+@command(
+    "execute_shell_popen",
+    "Execute Shell Command, non-interactive commands only",
+    '"command_line": "<command_line>"',
+    CFG.execute_local_commands,
+    "You are not allowed to run local shell commands. To execute"
+    " shell commands, EXECUTE_LOCAL_COMMANDS must be set to 'True' "
+    "in your config. Do not attempt to bypass the restriction.",
+)
 def execute_shell_popen(command_line) -> str:
     """Execute a shell command with Popen and returns an english description
     of the event and the process id
@@ -9,11 +9,13 @@ import requests
 from colorama import Back, Fore
 from requests.adapters import HTTPAdapter, Retry

+from autogpt.config import Config
 from autogpt.commands.command import command
 from autogpt.spinner import Spinner
 from autogpt.utils import readable_file_size
 from autogpt.workspace import WORKSPACE_PATH, path_in_workspace

+CFG = Config()
 LOG_FILE = "file_logger.txt"
 LOG_FILE_PATH = WORKSPACE_PATH / LOG_FILE

@@ -82,7 +84,7 @@ def split_file(
         start += max_length - overlap


-@command("read_file", "Read file", '"file": "<file>"')
+@command("read_file", "Read file", '"filename": "<filename>"')
 def read_file(filename: str) -> str:
     """Read a file and return the contents

@@ -135,7 +137,7 @@ def ingest_file(
         print(f"Error while ingesting file '{filename}': {str(e)}")


-@command("write_to_file", "Write to file", '"file": "<file>", "text": "<text>"')
+@command("write_to_file", "Write to file", '"filename": "<filename>", "text": "<text>"')
 def write_to_file(filename: str, text: str) -> str:
     """Write text to a file

@@ -161,7 +163,7 @@ def write_to_file(filename: str, text: str) -> str:
         return f"Error: {str(e)}"


-@command("append_to_file", "Append to file", '"file": "<file>", "text": "<text>"')
+@command("append_to_file", "Append to file", '"filename": "<filename>", "text": "<text>"')
 def append_to_file(filename: str, text: str, shouldLog: bool = True) -> str:
     """Append text to a file

@@ -185,7 +187,7 @@ def append_to_file(filename: str, text: str, shouldLog: bool = True) -> str:
         return f"Error: {str(e)}"


-@command("delete_file", "Delete file", '"file": "<file>"')
+@command("delete_file", "Delete file", '"filename": "<filename>"')
 def delete_file(filename: str) -> str:
     """Delete a file

@@ -233,6 +235,8 @@ def search_files(directory: str) -> list[str]:
     return found_files


+
+@command("download_file", "Search Files", '"url": "<url>", "filename": "<filename>"', CFG.allow_downloads, "Error: You do not have user authorization to download files locally.")
 def download_file(url, filename):
     """Downloads a file
     Args:
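Every @command signature in this file switches its JSON key from "file" to "filename", matching the parameter name of the wrapped function. That matters because the model's arguments arrive as a JSON object and are passed on as keyword arguments; a key that does not match a parameter raises a TypeError. A quick illustration of the failure mode (hypothetical example, not code from this commit):

def read_file(filename: str) -> str:
    return f"contents of {filename}"

good_args = {"filename": "notes.txt"}  # key matches the parameter name
bad_args = {"file": "notes.txt"}       # key from the old signature string

print(read_file(**good_args))  # -> contents of notes.txt

try:
    read_file(**bad_args)
except TypeError as exc:
    print(exc)  # read_file() got an unexpected keyword argument 'file'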
@@ -11,24 +11,24 @@ CFG = Config()
 @command(
     "clone_repository",
     "Clone Repositoryy",
-    '"repository_url": "<url>", "clone_path": "<directory>"',
+    '"repository_url": "<repository_url>", "clone_path": "<clone_path>"',
     CFG.github_username and CFG.github_api_key,
     "Configure github_username and github_api_key.",
 )
-def clone_repository(repo_url: str, clone_path: str) -> str:
+def clone_repository(repository_url: str, clone_path: str) -> str:
     """Clone a GitHub repository locally

     Args:
-        repo_url (str): The URL of the repository to clone
+        repository_url (str): The URL of the repository to clone
         clone_path (str): The path to clone the repository to

     Returns:
         str: The result of the clone operation"""
-    split_url = repo_url.split("//")
+    split_url = repository_url.split("//")
     auth_repo_url = f"//{CFG.github_username}:{CFG.github_api_key}@".join(split_url)
     safe_clone_path = path_in_workspace(clone_path)
     try:
         Repo.clone_from(auth_repo_url, safe_clone_path)
-        return f"""Cloned {repo_url} to {safe_clone_path}"""
+        return f"""Cloned {repository_url} to {safe_clone_path}"""
     except Exception as e:
         return f"Error: {str(e)}"
@@ -11,7 +11,7 @@ from autogpt.config import Config
 CFG = Config()


-@command("google", "Google Search", '"query": "<search>"', not CFG.google_api_key)
+@command("google", "Google Search", '"query": "<query>"', not CFG.google_api_key)
 def google_search(query: str, num_results: int = 8) -> str:
     """Return the results of a Google search

@@ -40,7 +40,7 @@ def google_search(query: str, num_results: int = 8) -> str:
 @command(
     "google",
     "Google Search",
-    '"query": "<search>"',
+    '"query": "<query>"',
     bool(CFG.google_api_key),
     "Configure google_api_key.",
 )
@@ -12,7 +12,7 @@ load_dotenv()
 @command(
     "send_tweet",
     "Send Tweet",
-    '"text": "<text>"',
+    '"tweet_text": "<tweet_text>"',
 )
 def send_tweet(tweet_text: str) -> str:
     """
@@ -12,6 +12,9 @@ import yaml

 from autogpt.prompts.generator import PromptGenerator

+# Soon this will go in a folder where it remembers more stuff about the run(s)
+SAVE_FILE = str(Path(os.getcwd()) / "ai_settings.yaml")
+

 class AIConfig:
     """
@@ -44,8 +47,6 @@ class AIConfig:
         self.prompt_generator = None
         self.command_registry = None

-    # Soon this will go in a folder where it remembers more stuff about the run(s)
-    SAVE_FILE = Path(os.getcwd()) / "ai_settings.yaml"

     @staticmethod
     def load(config_file: str = SAVE_FILE) -> "AIConfig":
@@ -83,12 +83,14 @@ def create_chat_completion(
             temperature=temperature,
             max_tokens=max_tokens,
         ):
-            return plugin.handle_chat_completion(
+            message = plugin.handle_chat_completion(
                 messages=messages,
                 model=model,
                 temperature=temperature,
                 max_tokens=max_tokens,
             )
+            if message is not None:
+                return message
     response = None
     for attempt in range(num_retries):
         backoff = 2 ** (attempt + 2)
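The change above stops returning the plugin result unconditionally: a plugin that claims it can handle chat completion but then returns None no longer short-circuits the call, and execution falls through to the normal retry loop. A condensed sketch of the pattern; the plugin method names are placeholders modelled on the hunk, not a full plugin API.

from typing import List, Optional

class DecliningPlugin:
    """Placeholder plugin used only for this sketch."""

    def can_handle_chat_completion(self, messages: List[dict]) -> bool:
        return True

    def handle_chat_completion(self, messages: List[dict]) -> Optional[str]:
        return None  # the plugin may still decline at this point

def create_chat_completion(messages: List[dict], plugins: List[DecliningPlugin]) -> str:
    for plugin in plugins:
        if plugin.can_handle_chat_completion(messages):
            message = plugin.handle_chat_completion(messages)
            if message is not None:
                return message
    # No plugin produced a message, so fall back to the normal model call.
    return "response from the model"

print(create_chat_completion([{"role": "user", "content": "hi"}], [DecliningPlugin()]))
# -> response from the model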
@@ -34,7 +34,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):

     def on_response(self, response: str, *args, **kwargs) -> str:
         """This method is called when a response is received from the model."""
-        pass
+        return response

     def can_handle_post_prompt(self) -> bool:
         """This method is called to check that the plugin can
@@ -51,7 +51,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         Returns:
             PromptGenerator: The prompt generator.
         """
-        pass
+        return prompt

     def can_handle_on_planning(self) -> bool:
         """This method is called to check that the plugin can
@@ -84,7 +84,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         Returns:
             str: The resulting response.
         """
-        pass
+        return response

     def can_handle_pre_instruction(self) -> bool:
         """This method is called to check that the plugin can
@@ -100,7 +100,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         Returns:
             List[Message]: The resulting list of messages.
         """
-        pass
+        return messages

     def can_handle_on_instruction(self) -> bool:
         """This method is called to check that the plugin can
@@ -132,7 +132,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         Returns:
             str: The resulting response.
         """
-        pass
+        return response

     def can_handle_pre_command(self) -> bool:
         """This method is called to check that the plugin can
@@ -151,7 +151,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         Returns:
             Tuple[str, Dict[str, Any]]: The command name and the arguments.
         """
-        pass
+        return command_name, arguments

     def can_handle_post_command(self) -> bool:
         """This method is called to check that the plugin can
@@ -168,7 +168,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
         Returns:
             str: The resulting response.
         """
-        pass
+        return response

     def can_handle_chat_completion(
         self, messages: Dict[Any, Any], model: str, temperature: float, max_tokens: int
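Each of the BaseOpenAIPlugin hooks above previously ended in pass and therefore returned None; any caller that feeds the hook's return value back into the pipeline would silently lose the prompt, messages, or response. Returning the input unchanged turns the base class into a safe no-op. A reduced example of the difference (hook and class names shortened for the sketch):

class BrokenHook:
    def on_response(self, response: str) -> str:
        pass  # implicitly returns None, dropping the response

class PassthroughHook:
    def on_response(self, response: str) -> str:
        return response  # no-op that preserves the value

response = "final answer"
print(BrokenHook().on_response(response))       # -> None
print(PassthroughHook().on_response(response))  # -> final answer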
@@ -71,7 +71,9 @@ def build_default_prompt_generator() -> PromptGenerator:
         "Every command has a cost, so be smart and efficient. Aim to complete tasks in"
         " the least number of steps."
     )
+    prompt_generator.add_performance_evaluation(
+        "Write all code to a file."
+    )
     return prompt_generator

@@ -3,7 +3,7 @@ import os
 import requests
 import yaml
 from colorama import Fore
-from git import Repo
+from git.repo import Repo


 def clean_input(prompt: str = ""):
@@ -31,7 +31,6 @@ pre-commit
 black
 isort
 gitpython==3.1.31
-abstract-singleton
 auto-gpt-plugin-template

 # Items below this point will not be included in the Docker Image
@@ -48,5 +47,3 @@ pytest-mock

 # OpenAI and Generic plugins import
 openapi-python-client==0.13.4
-abstract-singleton
-auto-gpt-plugin-template