diff --git a/scripts/agent_manager.py b/scripts/agent_manager.py index ad120c40..e6bf3b86 100644 --- a/scripts/agent_manager.py +++ b/scripts/agent_manager.py @@ -7,6 +7,7 @@ agents = {} # key, (task, full_message_history, model) # TODO: Centralise use of create_chat_completion() to globally enforce token limit def create_agent(task, prompt, model): + """Create a new agent and return its key""" global next_key global agents @@ -32,6 +33,7 @@ def create_agent(task, prompt, model): def message_agent(key, message): + """Send a message to an agent and return its response""" global agents task, messages, model = agents[int(key)] @@ -52,6 +54,7 @@ def message_agent(key, message): def list_agents(): + """Return a list of all agents""" global agents # Return a list of agent keys and their tasks @@ -59,6 +62,7 @@ def list_agents(): def delete_agent(key): + """Delete an agent and return True if successful, False otherwise""" global agents try: diff --git a/scripts/ai_config.py b/scripts/ai_config.py index 8cfa183a..59c75201 100644 --- a/scripts/ai_config.py +++ b/scripts/ai_config.py @@ -3,7 +3,9 @@ import data import os class AIConfig: + """Class to store the AI's name, role, and goals.""" def __init__(self, ai_name="", ai_role="", ai_goals=[]): + """Initialize the AIConfig class""" self.ai_name = ai_name self.ai_role = ai_role self.ai_goals = ai_goals @@ -13,7 +15,7 @@ class AIConfig: @classmethod def load(cls, config_file=SAVE_FILE): - # Load variables from yaml file if it exists + """Load variables from yaml file if it exists, otherwise use defaults.""" try: with open(config_file) as file: config_params = yaml.load(file, Loader=yaml.FullLoader) @@ -27,11 +29,14 @@ class AIConfig: return cls(ai_name, ai_role, ai_goals) def save(self, config_file=SAVE_FILE): + """Save variables to yaml file.""" config = {"ai_name": self.ai_name, "ai_role": self.ai_role, "ai_goals": self.ai_goals} with open(config_file, "w") as file: yaml.dump(config, file) + def 
construct_full_prompt(self): + """Construct the full prompt for the AI to use.""" prompt_start = """Your decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.""" # Construct full prompt diff --git a/scripts/ai_functions.py b/scripts/ai_functions.py index 05aa93a2..f93d7ea6 100644 --- a/scripts/ai_functions.py +++ b/scripts/ai_functions.py @@ -6,8 +6,8 @@ from json_parser import fix_and_parse_json cfg = Config() # Evaluating code - def evaluate_code(code: str) -> List[str]: + """Evaluates the given code and returns a list of suggestions for improvements.""" function_string = "def analyze_code(code: str) -> List[str]:" args = [code] description_string = """Analyzes the given code and returns a list of suggestions for improvements.""" @@ -18,8 +18,8 @@ def evaluate_code(code: str) -> List[str]: # Improving code - def improve_code(suggestions: List[str], code: str) -> str: + """Improves the provided code based on the suggestions provided, making no other changes.""" function_string = ( "def generate_improved_code(suggestions: List[str], code: str) -> str:" ) @@ -31,9 +31,8 @@ def improve_code(suggestions: List[str], code: str) -> str: # Writing tests - - def write_tests(code: str, focus: List[str]) -> str: + """Generates test cases for the existing code, focusing on specific areas if required.""" function_string = ( "def create_test_cases(code: str, focus: Optional[str] = None) -> str:" ) diff --git a/scripts/browse.py b/scripts/browse.py index 7eeaaf4d..c15214e7 100644 --- a/scripts/browse.py +++ b/scripts/browse.py @@ -6,6 +6,7 @@ from llm_utils import create_chat_completion cfg = Config() def scrape_text(url): + """Scrape text from a webpage""" # Most basic check if the URL is valid: if not url.startswith('http'): return "Error: Invalid URL" @@ -33,6 +34,7 @@ def scrape_text(url): def extract_hyperlinks(soup): + """Extract hyperlinks from a BeautifulSoup 
object""" hyperlinks = [] for link in soup.find_all('a', href=True): hyperlinks.append((link.text, link['href'])) @@ -40,6 +42,7 @@ def extract_hyperlinks(soup): def format_hyperlinks(hyperlinks): + """Format hyperlinks into a list of strings""" formatted_links = [] for link_text, link_url in hyperlinks: formatted_links.append(f"{link_text} ({link_url})") @@ -47,6 +50,7 @@ def format_hyperlinks(hyperlinks): def scrape_links(url): + """Scrape links from a webpage""" response = requests.get(url, headers=cfg.user_agent_header) # Check if the response contains an HTTP error @@ -64,6 +68,7 @@ def scrape_links(url): def split_text(text, max_length=8192): + """Split text into chunks of a maximum length""" paragraphs = text.split("\n") current_length = 0 current_chunk = [] @@ -82,12 +87,14 @@ def split_text(text, max_length=8192): def create_message(chunk, question): + """Create a message for the user to summarize a chunk of text""" return { "role": "user", "content": f"\"\"\"{chunk}\"\"\" Using the above text, please answer the following question: \"{question}\" -- if the question cannot be answered using the text, please summarize the text." } def summarize_text(text, question): + """Summarize text using the LLM model""" if not text: return "Error: No text to summarize" diff --git a/scripts/call_ai_function.py b/scripts/call_ai_function.py index db1c9556..f8238658 100644 --- a/scripts/call_ai_function.py +++ b/scripts/call_ai_function.py @@ -1,11 +1,12 @@ from config import Config + cfg = Config() from llm_utils import create_chat_completion - # This is a magic function that can do anything with no-code. See # https://github.com/Torantulino/AI-Functions for more info. 
def call_ai_function(function, args, description, model=None): + """Call an AI function""" if model is None: model = cfg.smart_llm_model # For each arg, if any are None, convert to "None": diff --git a/scripts/chat.py b/scripts/chat.py index a27fbfd7..f4cf2299 100644 --- a/scripts/chat.py +++ b/scripts/chat.py @@ -3,11 +3,9 @@ import openai from dotenv import load_dotenv from config import Config import token_counter - -cfg = Config() - from llm_utils import create_chat_completion +cfg = Config() def create_chat_message(role, content): """ @@ -48,6 +46,7 @@ def chat_with_ai( permanent_memory, token_limit, debug=False): + """Interact with the OpenAI API, sending the prompt, user input, message history, and permanent memory.""" while True: try: """ @@ -66,7 +65,7 @@ def chat_with_ai( model = cfg.fast_llm_model # TODO: Change model from hardcode to argument # Reserve 1000 tokens for the response if debug: - print(f"Token limit: {token_limit}") + print(f"Token limit: {token_limit}") send_token_limit = token_limit - 1000 relevant_memory = permanent_memory.get_relevant(str(full_message_history[-5:]), 10) diff --git a/scripts/commands.py b/scripts/commands.py index 02f3baa8..5e14f6cc 100644 --- a/scripts/commands.py +++ b/scripts/commands.py @@ -25,6 +25,7 @@ def is_valid_int(value): return False def get_command(response): + """Parse the response and return the command name and arguments""" try: response_json = fix_and_parse_json(response) @@ -53,6 +54,7 @@ def get_command(response): def execute_command(command_name, arguments): + """Execute the command and return the result""" memory = get_memory(cfg) try: @@ -118,11 +120,13 @@ def execute_command(command_name, arguments): def get_datetime(): + """Return the current date and time""" return "Current date and time: " + \ datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") def google_search(query, num_results=8): + """Return the results of a google search""" search_results = [] for j in ddg(query, 
max_results=num_results): search_results.append(j) @@ -130,6 +134,7 @@ def google_search(query, num_results=8): return json.dumps(search_results, ensure_ascii=False, indent=4) def google_official_search(query, num_results=8): + """Return the results of a google search using the official Google API""" from googleapiclient.discovery import build from googleapiclient.errors import HttpError import json @@ -165,6 +170,7 @@ def google_official_search(query, num_results=8): return search_results_links def browse_website(url, question): + """Browse a website and return the summary and links""" summary = get_text_summary(url, question) links = get_hyperlinks(url) @@ -178,23 +184,27 @@ def browse_website(url, question): def get_text_summary(url, question): + """Return the summary of a webpage""" text = browse.scrape_text(url) summary = browse.summarize_text(text, question) return """ "Result" : """ + summary def get_hyperlinks(url): + """Return the hyperlinks of a webpage""" link_list = browse.scrape_links(url) return link_list def commit_memory(string): + """Commit a string to memory""" _text = f"""Committing memory with string "{string}" """ mem.permanent_memory.append(string) return _text def delete_memory(key): + """Delete a memory with a given key""" if key >= 0 and key < len(mem.permanent_memory): _text = "Deleting memory with key " + str(key) del mem.permanent_memory[key] @@ -206,6 +216,7 @@ def delete_memory(key): def overwrite_memory(key, string): + """Overwrite a memory with a given key and string""" # Check if the key is a valid integer if is_valid_int(key): key_int = int(key) @@ -232,11 +243,13 @@ def overwrite_memory(key, string): def shutdown(): + """Shut down the program""" print("Shutting down...") quit() def start_agent(name, task, prompt, model=cfg.fast_llm_model): + """Start an agent with a given name, task, and prompt""" global cfg # Remove underscores from name @@ -260,6 +273,7 @@ def start_agent(name, task, prompt, model=cfg.fast_llm_model): 
def message_agent(key, message): + """Message an agent with a given key and message""" global cfg # Check if the key is a valid integer @@ -278,10 +292,12 @@ def message_agent(key, message): def list_agents(): + """List all agents""" return agents.list_agents() def delete_agent(key): + """Delete an agent with a given key""" result = agents.delete_agent(key) if not result: return f"Agent {key} does not exist." diff --git a/scripts/config.py b/scripts/config.py index 4d7adec1..03f1d5df 100644 --- a/scripts/config.py +++ b/scripts/config.py @@ -14,6 +14,7 @@ class Singleton(abc.ABCMeta, type): _instances = {} def __call__(cls, *args, **kwargs): + """Call method for the singleton metaclass.""" if cls not in cls._instances: cls._instances[cls] = super( Singleton, cls).__call__( @@ -31,6 +32,7 @@ class Config(metaclass=Singleton): """ def __init__(self): + """Initialize the Config class""" self.debug = False self.continuous_mode = False self.speak_mode = False @@ -77,40 +79,53 @@ class Config(metaclass=Singleton): openai.api_key = self.openai_api_key def set_continuous_mode(self, value: bool): + """Set the continuous mode value.""" self.continuous_mode = value def set_speak_mode(self, value: bool): + """Set the speak mode value.""" self.speak_mode = value def set_fast_llm_model(self, value: str): + """Set the fast LLM model value.""" self.fast_llm_model = value def set_smart_llm_model(self, value: str): + """Set the smart LLM model value.""" self.smart_llm_model = value def set_fast_token_limit(self, value: int): + """Set the fast token limit value.""" self.fast_token_limit = value def set_smart_token_limit(self, value: int): + """Set the smart token limit value.""" self.smart_token_limit = value def set_openai_api_key(self, value: str): + """Set the OpenAI API key value.""" self.openai_api_key = value - + def set_elevenlabs_api_key(self, value: str): + """Set the ElevenLabs API key value.""" self.elevenlabs_api_key = value - + def set_google_api_key(self, value: str): + 
"""Set the Google API key value.""" self.google_api_key = value - + def set_custom_search_engine_id(self, value: str): + """Set the custom search engine id value.""" self.custom_search_engine_id = value def set_pinecone_api_key(self, value: str): + """Set the Pinecone API key value.""" self.pinecone_api_key = value def set_pinecone_region(self, value: str): + """Set the Pinecone region value.""" self.pinecone_region = value def set_debug_mode(self, value: bool): + """Set the debug mode value.""" self.debug = value diff --git a/scripts/data.py b/scripts/data.py index 8d8a7b4a..cd41f313 100644 --- a/scripts/data.py +++ b/scripts/data.py @@ -2,6 +2,7 @@ import os from pathlib import Path def load_prompt(): + """Load the prompt from data/prompt.txt""" try: # get directory of this file: file_dir = Path(__file__).parent diff --git a/scripts/execute_code.py b/scripts/execute_code.py index 614ef6fc..f34469dd 100644 --- a/scripts/execute_code.py +++ b/scripts/execute_code.py @@ -3,6 +3,7 @@ import os def execute_python_file(file): + """Execute a Python file in a Docker container and return the output""" workspace_folder = "auto_gpt_workspace" print (f"Executing file '{file}' in workspace '{workspace_folder}'") diff --git a/scripts/file_operations.py b/scripts/file_operations.py index 90c9a1e4..1b87cc28 100644 --- a/scripts/file_operations.py +++ b/scripts/file_operations.py @@ -4,11 +4,13 @@ import os.path # Set a dedicated folder for file I/O working_directory = "auto_gpt_workspace" +# Create the directory if it doesn't exist if not os.path.exists(working_directory): os.makedirs(working_directory) def safe_join(base, *paths): + """Join one or more path components intelligently.""" new_path = os.path.join(base, *paths) norm_new_path = os.path.normpath(new_path) @@ -19,6 +21,7 @@ def safe_join(base, *paths): def read_file(filename): + """Read a file and return the contents""" try: filepath = safe_join(working_directory, filename) with open(filepath, "r") as f: @@ -29,6 +32,7 
@@ def read_file(filename): def write_to_file(filename, text): + """Write text to a file""" try: filepath = safe_join(working_directory, filename) directory = os.path.dirname(filepath) @@ -42,6 +46,7 @@ def write_to_file(filename, text): def append_to_file(filename, text): + """Append text to a file""" try: filepath = safe_join(working_directory, filename) with open(filepath, "a") as f: @@ -52,6 +57,7 @@ def append_to_file(filename, text): def delete_file(filename): + """Delete a file""" try: filepath = safe_join(working_directory, filename) os.remove(filepath) diff --git a/scripts/json_parser.py b/scripts/json_parser.py index 6a5f073f..07de530c 100644 --- a/scripts/json_parser.py +++ b/scripts/json_parser.py @@ -26,10 +26,11 @@ JSON_SCHEMA = """ """ -def fix_and_parse_json( +def fix_and_parse_json( json_str: str, try_to_fix_with_gpt: bool = True ) -> Union[str, Dict[Any, Any]]: + """Fix and parse JSON string""" try: json_str = json_str.replace('\t', '') return json.loads(json_str) @@ -72,6 +73,7 @@ def fix_and_parse_json( def fix_json(json_str: str, schema: str, debug=False) -> str: + """Fix the given JSON string to make it parseable and fully compliant with the provided schema.""" # Try to fix the JSON using gpt: function_string = "def fix_json(json_str: str, schema:str=None) -> str:" args = [f"'''{json_str}'''", f"'''{schema}'''"] @@ -93,6 +95,7 @@ def fix_json(json_str: str, schema: str, debug=False) -> str: print("-----------") print(f"Fixed JSON: {result_string}") print("----------- END OF FIX ATTEMPT ----------------") + try: json.loads(result_string) # just check the validity return result_string diff --git a/scripts/llm_utils.py b/scripts/llm_utils.py index 5a471ab7..94ba5f13 100644 --- a/scripts/llm_utils.py +++ b/scripts/llm_utils.py @@ -6,6 +6,7 @@ openai.api_key = cfg.openai_api_key # Overly simple abstraction until we create something better def create_chat_completion(messages, model=None, temperature=None, max_tokens=None)->str: + """Create a chat 
completion using the OpenAI API""" if cfg.use_azure: response = openai.ChatCompletion.create( deployment_id=cfg.openai_deployment_id, diff --git a/scripts/main.py b/scripts/main.py index f96afeb1..6115cffe 100644 --- a/scripts/main.py +++ b/scripts/main.py @@ -25,6 +25,7 @@ def print_to_console( speak_text=False, min_typing_speed=0.05, max_typing_speed=0.01): + """Prints text to the console with a typing effect""" global cfg if speak_text and cfg.speak_mode: speak.say_text(f"{title}. {content}") @@ -46,6 +47,7 @@ def print_to_console( def print_assistant_thoughts(assistant_reply): + """Prints the assistant's thoughts to the console""" global ai_name global cfg try: @@ -105,7 +107,7 @@ def print_assistant_thoughts(assistant_reply): def load_variables(config_file="config.yaml"): - # Load variables from yaml file if it exists + """Load variables from yaml file if it exists, otherwise prompt the user for input""" try: with open(config_file) as file: config = yaml.load(file, Loader=yaml.FullLoader) @@ -159,6 +161,7 @@ def load_variables(config_file="config.yaml"): def construct_prompt(): + """Construct the prompt for the AI to respond to""" config = AIConfig.load() if config.ai_name: print_to_console( @@ -187,6 +190,7 @@ Continue (y/n): """) def prompt_user(): + """Prompt the user for input""" ai_name = "" # Construct the prompt print_to_console( @@ -239,6 +243,7 @@ def prompt_user(): return config def parse_arguments(): + """Parses the arguments passed to the script""" global cfg cfg.set_continuous_mode(False) cfg.set_speak_mode(False) diff --git a/scripts/speak.py b/scripts/speak.py index 13517d36..10dd7c07 100644 --- a/scripts/speak.py +++ b/scripts/speak.py @@ -15,6 +15,7 @@ tts_headers = { } def eleven_labs_speech(text, voice_index=0): + """Speak text using elevenlabs.io's API""" tts_url = "https://api.elevenlabs.io/v1/text-to-speech/{voice_id}".format( voice_id=voices[voice_index]) formatted_message = {"text": text} diff --git a/scripts/spinner.py 
b/scripts/spinner.py index 2a48dfec..df39dbbd 100644 --- a/scripts/spinner.py +++ b/scripts/spinner.py @@ -5,7 +5,9 @@ import time class Spinner: + """A simple spinner class""" def __init__(self, message="Loading...", delay=0.1): + """Initialize the spinner class""" self.spinner = itertools.cycle(['-', '/', '|', '\\']) self.delay = delay self.message = message @@ -13,6 +15,7 @@ class Spinner: self.spinner_thread = None def spin(self): + """Spin the spinner""" while self.running: sys.stdout.write(next(self.spinner) + " " + self.message + "\r") sys.stdout.flush() @@ -20,11 +23,13 @@ class Spinner: sys.stdout.write('\b' * (len(self.message) + 2)) def __enter__(self): + """Start the spinner""" self.running = True self.spinner_thread = threading.Thread(target=self.spin) self.spinner_thread.start() def __exit__(self, exc_type, exc_value, exc_traceback): + """Stop the spinner""" self.running = False self.spinner_thread.join() sys.stdout.write('\r' + ' ' * (len(self.message) + 2) + '\r')