Fix all commands and cleanup

BillSchumacher
2023-04-19 18:17:04 -05:00
parent 23c650ca10
commit d7679d755f
16 changed files with 63 additions and 83 deletions

@@ -3,33 +3,9 @@ import json
from typing import Dict, List, NoReturn, Union
from autogpt.agent.agent_manager import AgentManager
from autogpt.commands.analyze_code import analyze_code
from autogpt.commands.audio_text import read_audio_from_file
from autogpt.commands.command import CommandRegistry, command
from autogpt.commands.evaluate_code import evaluate_code
from autogpt.commands.execute_code import (
execute_python_file,
execute_shell,
execute_shell_popen,
)
from autogpt.commands.file_operations import (
append_to_file,
delete_file,
download_file,
read_file,
search_files,
write_to_file,
)
from autogpt.commands.git_operations import clone_repository
from autogpt.commands.google_search import google_official_search, google_search
from autogpt.commands.image_gen import generate_image
from autogpt.commands.improve_code import improve_code
from autogpt.commands.twitter import send_tweet
from autogpt.commands.web_requests import scrape_links, scrape_text
from autogpt.commands.web_selenium import browse_website
from autogpt.commands.write_tests import write_tests
from autogpt.config import Config
from autogpt.json_utils.json_fix_llm import fix_and_parse_json
from autogpt.memory import get_memory
from autogpt.processing.text import summarize_text
from autogpt.prompts.generator import PromptGenerator
@@ -137,26 +113,8 @@ def execute_command(
command_name = map_command_synonyms(command_name.lower())
if command_name == "memory_add":
return memory.add(arguments["string"])
elif command_name == "get_text_summary":
return get_text_summary(arguments["url"], arguments["question"])
elif command_name == "get_hyperlinks":
return get_hyperlinks(arguments["url"])
elif command_name == "analyze_code":
return analyze_code(arguments["code"])
elif command_name == "download_file":
if not CFG.allow_downloads:
return "Error: You do not have user authorization to download files locally."
return download_file(arguments["url"], arguments["file"])
elif command_name == "execute_shell_popen":
if CFG.execute_local_commands:
return execute_shell_popen(arguments["command_line"])
else:
return (
"You are not allowed to run local shell commands. To execute"
" shell commands, EXECUTE_LOCAL_COMMANDS must be set to 'True' "
"in your config. Do not attempt to bypass the restriction."
)
return get_memory(CFG).add(arguments["string"])
# TODO: Change these to take in a file rather than pasted code, if
# non-file is given, return instructions "Input should be a python
# filepath, write your code to file and try again
@@ -177,6 +135,7 @@ def execute_command(
return f"Error: {str(e)}"
@command("get_text_summary", "Get text summary", '"url": "<url>", "question": "<question>"')
def get_text_summary(url: str, question: str) -> str:
"""Return the results of a Google search
@@ -192,6 +151,7 @@ def get_text_summary(url: str, question: str) -> str:
return f""" "Result" : {summary}"""
@command("get_hyperlinks", "Get text summary", '"url": "<url>"')
def get_hyperlinks(url: str) -> Union[str, List[str]]:
"""Return the results of a Google search

@@ -130,13 +130,14 @@ def main(
cfg.set_plugins(scan_plugins(cfg, cfg.debug_mode))
# Create a CommandRegistry instance and scan default folder
command_registry = CommandRegistry()
command_registry.import_commands("autogpt.commands.analyze_code")
command_registry.import_commands("autogpt.commands.audio_text")
command_registry.import_commands("autogpt.commands.evaluate_code")
command_registry.import_commands("autogpt.commands.execute_code")
command_registry.import_commands("autogpt.commands.file_operations")
command_registry.import_commands("autogpt.commands.git_operations")
command_registry.import_commands("autogpt.commands.google_search")
command_registry.import_commands("autogpt.commands.image_gen")
command_registry.import_commands("autogpt.commands.improve_code")
command_registry.import_commands("autogpt.commands.twitter")
command_registry.import_commands("autogpt.commands.web_selenium")
command_registry.import_commands("autogpt.commands.write_tests")

@@ -1,9 +1,15 @@
"""Code evaluation module."""
from __future__ import annotations
from autogpt.commands.command import command
from autogpt.llm_utils import call_ai_function
@command(
"analyze_code",
"Analyze Code",
'"code": "<full_code_string>"',
)
def analyze_code(code: str) -> list[str]:
"""
A function that takes in a string and returns a response from create chat

@@ -13,11 +13,11 @@ CFG = Config()
@command(
"read_audio_from_file",
"Convert Audio to text",
'"file": "<file>"',
'"filename": "<filename>"',
CFG.huggingface_audio_to_text_model,
"Configure huggingface_audio_to_text_model.",
)
def read_audio_from_file(audio_path: str) -> str:
def read_audio_from_file(filename: str) -> str:
"""
Convert audio to text.
@@ -27,7 +27,7 @@ def read_audio_from_file(audio_path: str) -> str:
Returns:
str: The text from the audio
"""
audio_path = path_in_workspace(audio_path)
audio_path = path_in_workspace(filename)
with open(audio_path, "rb") as audio_file:
audio = audio_file.read()
return read_audio(audio)

@@ -20,7 +20,7 @@ class Command:
name: str,
description: str,
method: Callable[..., Any],
signature: str = None,
signature: str = '',
enabled: bool = True,
disabled_reason: Optional[str] = None,
):
@@ -126,7 +126,7 @@ class CommandRegistry:
def command(
name: str,
description: str,
signature: str = None,
signature: str = '',
enabled: bool = True,
disabled_reason: Optional[str] = None,
) -> Callable[..., Any]:
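
The decorator's job, as used throughout this commit, is to attach the metadata above to the function so a registry import can pick it up later. A hedged sketch of that pattern, with a minimal stand-in for the Command class whose __init__ appears in the hunk above:

    from dataclasses import dataclass
    from typing import Any, Callable, Optional

    @dataclass
    class _Command:
        # Minimal stand-in mirroring the __init__ shown above.
        name: str
        description: str
        method: Callable[..., Any]
        signature: str = ""
        enabled: bool = True
        disabled_reason: Optional[str] = None

        def __call__(self, *args, **kwargs) -> Any:
            return self.method(*args, **kwargs)

    def command(
        name: str,
        description: str,
        signature: str = "",
        enabled: bool = True,
        disabled_reason: Optional[str] = None,
    ) -> Callable[..., Any]:
        def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
            # The attribute used as the registry hand-off is an assumption.
            func.command = _Command(
                name=name,
                description=description,
                method=func,
                signature=signature,
                enabled=enabled,
                disabled_reason=disabled_reason,
            )
            return func
        return decorator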

@@ -12,17 +12,17 @@ from autogpt.workspace import WORKSPACE_PATH, path_in_workspace
CFG = Config()
@command("execute_python_file", "Execute Python File", '"file": "<file>"')
def execute_python_file(file: str) -> str:
@command("execute_python_file", "Execute Python File", '"filename": "<filename>"')
def execute_python_file(filename: str) -> str:
"""Execute a Python file in a Docker container and return the output
Args:
file (str): The name of the file to execute
filename (str): The name of the file to execute
Returns:
str: The output of the file
"""
file = filename
print(f"Executing file '{file}' in workspace '{WORKSPACE_PATH}'")
if not file.endswith(".py"):
@@ -138,9 +138,16 @@ def execute_shell(command_line: str) -> str:
os.chdir(current_dir)
return output
@command(
"execute_shell_popen",
"Execute Shell Command, non-interactive commands only",
'"command_line": "<command_line>"',
CFG.execute_local_commands,
"You are not allowed to run local shell commands. To execute"
" shell commands, EXECUTE_LOCAL_COMMANDS must be set to 'True' "
"in your config. Do not attempt to bypass the restriction.",
)
def execute_shell_popen(command_line) -> str:
"""Execute a shell command with Popen and returns an english description
of the event and the process id
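
The permission check that used to live inside execute_command now travels with the declaration: enabled is evaluated from CFG when the module is imported, and disabled_reason is the message returned in its place. The same pattern applied to an illustrative command (the command itself is not part of this commit):

    from autogpt.commands.command import command
    from autogpt.config import Config

    CFG = Config()

    @command(
        "echo_locally",  # illustrative name, not part of this commit
        "Echo text on the local shell",
        '"text": "<text>"',
        CFG.execute_local_commands,
        "You are not allowed to run local shell commands.",
    )
    def echo_locally(text: str) -> str:
        return text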

@@ -9,11 +9,13 @@ import requests
from colorama import Back, Fore
from requests.adapters import HTTPAdapter, Retry
from autogpt.config import Config
from autogpt.commands.command import command
from autogpt.spinner import Spinner
from autogpt.utils import readable_file_size
from autogpt.workspace import WORKSPACE_PATH, path_in_workspace
CFG = Config()
LOG_FILE = "file_logger.txt"
LOG_FILE_PATH = WORKSPACE_PATH / LOG_FILE
@@ -82,7 +84,7 @@ def split_file(
start += max_length - overlap
@command("read_file", "Read file", '"file": "<file>"')
@command("read_file", "Read file", '"filename": "<filename>"')
def read_file(filename: str) -> str:
"""Read a file and return the contents
@@ -135,7 +137,7 @@ def ingest_file(
print(f"Error while ingesting file '{filename}': {str(e)}")
@command("write_to_file", "Write to file", '"file": "<file>", "text": "<text>"')
@command("write_to_file", "Write to file", '"filename": "<filename>", "text": "<text>"')
def write_to_file(filename: str, text: str) -> str:
"""Write text to a file
@@ -161,7 +163,7 @@ def write_to_file(filename: str, text: str) -> str:
return f"Error: {str(e)}"
@command("append_to_file", "Append to file", '"file": "<file>", "text": "<text>"')
@command("append_to_file", "Append to file", '"filename": "<filename>", "text": "<text>"')
def append_to_file(filename: str, text: str, shouldLog: bool = True) -> str:
"""Append text to a file
@@ -185,7 +187,7 @@ def append_to_file(filename: str, text: str, shouldLog: bool = True) -> str:
return f"Error: {str(e)}"
@command("delete_file", "Delete file", '"file": "<file>"')
@command("delete_file", "Delete file", '"filename": "<filename>"')
def delete_file(filename: str) -> str:
"""Delete a file
@@ -233,6 +235,8 @@ def search_files(directory: str) -> list[str]:
return found_files
@command("download_file", "Search Files", '"url": "<url>", "filename": "<filename>"', CFG.allow_downloads, "Error: You do not have user authorization to download files locally.")
def download_file(url, filename):
"""Downloads a file
Args:

@@ -11,24 +11,24 @@ CFG = Config()
@command(
"clone_repository",
"Clone Repositoryy",
'"repository_url": "<url>", "clone_path": "<directory>"',
'"repository_url": "<repository_url>", "clone_path": "<clone_path>"',
CFG.github_username and CFG.github_api_key,
"Configure github_username and github_api_key.",
)
def clone_repository(repo_url: str, clone_path: str) -> str:
def clone_repository(repository_url: str, clone_path: str) -> str:
"""Clone a GitHub repository locally
Args:
repo_url (str): The URL of the repository to clone
repository_url (str): The URL of the repository to clone
clone_path (str): The path to clone the repository to
Returns:
str: The result of the clone operation"""
split_url = repo_url.split("//")
split_url = repository_url.split("//")
auth_repo_url = f"//{CFG.github_username}:{CFG.github_api_key}@".join(split_url)
safe_clone_path = path_in_workspace(clone_path)
try:
Repo.clone_from(auth_repo_url, safe_clone_path)
return f"""Cloned {repo_url} to {safe_clone_path}"""
return f"""Cloned {repository_url} to {safe_clone_path}"""
except Exception as e:
return f"Error: {str(e)}"

@@ -11,7 +11,7 @@ from autogpt.config import Config
CFG = Config()
@command("google", "Google Search", '"query": "<search>"', not CFG.google_api_key)
@command("google", "Google Search", '"query": "<query>"', not CFG.google_api_key)
def google_search(query: str, num_results: int = 8) -> str:
"""Return the results of a Google search
@@ -40,7 +40,7 @@ def google_search(query: str, num_results: int = 8) -> str:
@command(
"google",
"Google Search",
'"query": "<search>"',
'"query": "<query>"',
bool(CFG.google_api_key),
"Configure google_api_key.",
)

@@ -12,7 +12,7 @@ load_dotenv()
@command(
"send_tweet",
"Send Tweet",
'"text": "<text>"',
'"tweet_text": "<tweet_text>"',
)
def send_tweet(tweet_text: str) -> str:
"""

@@ -12,6 +12,9 @@ import yaml
from autogpt.prompts.generator import PromptGenerator
# Soon this will go in a folder where it remembers more stuff about the run(s)
SAVE_FILE = str(Path(os.getcwd()) / "ai_settings.yaml")
class AIConfig:
"""
@@ -44,8 +47,6 @@ class AIConfig:
self.prompt_generator = None
self.command_registry = None
# Soon this will go in a folder where it remembers more stuff about the run(s)
SAVE_FILE = Path(os.getcwd()) / "ai_settings.yaml"
@staticmethod
def load(config_file: str = SAVE_FILE) -> "AIConfig":

@@ -83,12 +83,14 @@ def create_chat_completion(
temperature=temperature,
max_tokens=max_tokens,
):
return plugin.handle_chat_completion(
message = plugin.handle_chat_completion(
messages=messages,
model=model,
temperature=temperature,
max_tokens=max_tokens,
)
if message is not None:
return message
response = None
for attempt in range(num_retries):
backoff = 2 ** (attempt + 2)
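
Previously the first plugin that said it could handle chat completion ended the call even when it returned None; now a None result lets later plugins, and finally the normal OpenAI retry path, take over. A self-contained toy illustration of that fall-through (the classes are illustrative, not the real plugin interface):

    class DecliningPlugin:
        def handle_chat_completion(self, **kwargs):
            return None  # claims the call but has nothing to say

    class AnsweringPlugin:
        def handle_chat_completion(self, **kwargs):
            return "answer from plugin"

    def first_plugin_answer(plugins, **kwargs):
        for plugin in plugins:
            message = plugin.handle_chat_completion(**kwargs)
            if message is not None:
                return message  # only a real result ends the search
        return "fall back to the OpenAI call"

    print(first_plugin_answer([DecliningPlugin(), AnsweringPlugin()], messages=[]))
    # -> "answer from plugin"; before this change the first None would have been returned.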

@@ -34,7 +34,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
def on_response(self, response: str, *args, **kwargs) -> str:
"""This method is called when a response is received from the model."""
pass
return response
def can_handle_post_prompt(self) -> bool:
"""This method is called to check that the plugin can
@@ -51,7 +51,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
Returns:
PromptGenerator: The prompt generator.
"""
pass
return prompt
def can_handle_on_planning(self) -> bool:
"""This method is called to check that the plugin can
@@ -84,7 +84,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
Returns:
str: The resulting response.
"""
pass
return response
def can_handle_pre_instruction(self) -> bool:
"""This method is called to check that the plugin can
@@ -100,7 +100,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
Returns:
List[Message]: The resulting list of messages.
"""
pass
return messages
def can_handle_on_instruction(self) -> bool:
"""This method is called to check that the plugin can
@@ -132,7 +132,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
Returns:
str: The resulting response.
"""
pass
return response
def can_handle_pre_command(self) -> bool:
"""This method is called to check that the plugin can
@@ -151,7 +151,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
Returns:
Tuple[str, Dict[str, Any]]: The command name and the arguments.
"""
pass
return command_name, arguments
def can_handle_post_command(self) -> bool:
"""This method is called to check that the plugin can
@@ -168,7 +168,7 @@ class BaseOpenAIPlugin(AutoGPTPluginTemplate):
Returns:
str: The resulting response.
"""
pass
return response
def can_handle_chat_completion(
self, messages: Dict[Any, Any], model: str, temperature: float, max_tokens: int
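
Returning the inputs instead of pass (which returns None) makes the base plugin a safe no-op when callers chain each hook's output into the next call. A small standalone illustration (not the real class):

    class NoOpPlugin:
        """Standalone illustration of the pass-through defaults."""

        def on_response(self, response: str, *args, **kwargs) -> str:
            # Returning the input keeps chained plugin calls lossless.
            return response

    response = "hello"
    for plugin in [NoOpPlugin(), NoOpPlugin()]:
        response = plugin.on_response(response)
    assert response == "hello"  # with the old `pass` bodies this would end up None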

@@ -71,7 +71,9 @@ def build_default_prompt_generator() -> PromptGenerator:
"Every command has a cost, so be smart and efficient. Aim to complete tasks in"
" the least number of steps."
)
prompt_generator.add_performance_evaluation(
"Write all code to a file."
)
return prompt_generator

@@ -3,7 +3,7 @@ import os
import requests
import yaml
from colorama import Fore
from git import Repo
from git.repo import Repo
def clean_input(prompt: str = ""):

@@ -31,7 +31,6 @@ pre-commit
black
isort
gitpython==3.1.31
abstract-singleton
auto-gpt-plugin-template
# Items below this point will not be included in the Docker Image
@@ -48,5 +47,3 @@ pytest-mock
# OpenAI and Generic plugins import
openapi-python-client==0.13.4
abstract-singleton
auto-gpt-plugin-template